file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
mod_dir_path.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_macros)]
// ignore-pretty issue #37195
mod mod_dir_simple {
#[path = "test.rs"]
pub mod syrup;
}
pub fn | () {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
#[path = "auxiliary"]
mod foo {
mod two_macros_2;
}
#[path = "auxiliary"]
mod bar {
macro_rules! m { () => { mod two_macros_2; } }
m!();
}
}
| main | identifier_name |
mod_dir_path.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_macros)]
// ignore-pretty issue #37195
mod mod_dir_simple {
#[path = "test.rs"]
pub mod syrup;
} | mod foo {
mod two_macros_2;
}
#[path = "auxiliary"]
mod bar {
macro_rules! m { () => { mod two_macros_2; } }
m!();
}
} |
pub fn main() {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
#[path = "auxiliary"] | random_line_split |
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn | <'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
{
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
}
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a>
where T::Item : Clone
{
let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {}
| ok1 | identifier_name |
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn ok1<'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
|
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a>
where T::Item : Clone
{
let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {}
| {
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
} | identifier_body |
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn ok1<'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
{
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
}
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a> | let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {} | where T::Item : Clone
{ | random_line_split |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
|
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
}
| {
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
} | identifier_body |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn | (self) -> Self::Abort {}
}
| abort | identifier_name |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N |
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
}
| {
self.indices.extend(keys.iter());
Ok(())
} | conditional_block |
builder.rs | use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
} | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer}; | random_line_split |
|
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn | <F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len!= 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
}
| create_helper | identifier_name |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len!= 0 {
crate::NUM_RETRIES
} else | ;
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
}
| {
1
} | conditional_block |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len!= 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res, | io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
} | };
}
Err(io::Error::new( | random_line_split |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
| }
| {
let num_retries = if random_len != 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base) | identifier_body |
lib.rs | self.ic = next_ic;
next_ic = self.chars.advance();
for i in range(0, clist.size) {
let pc = clist.pc(i);
let step_state = self.step(&mut groups, nlist,
clist.groups(i), pc);
match step_state {
StepMatchEarlyReturn =>
return vec![Some(0u), Some(0u)],
StepMatch => { matched = true; break },
StepContinue => {},
}
}
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
match self.which {
Exists if matched => vec![Some(0u), Some(0u)],
Exists => vec![None, None],
Location | Submatches => groups.iter().map(|x| *x).collect(),
}
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variable)]
#[inline]
fn step(&self, groups: &mut Captures, nlist: &mut Threads,
caps: &mut Captures, pc: uint) -> StepState {
$step_insts
StepContinue
}
fn add(&self, nlist: &mut Threads, pc: uint,
groups: &mut Captures) {
if nlist.contains(pc) {
return
}
$add_insts
}
}
struct Thread {
pc: uint,
groups: Captures,
}
struct Threads {
which: MatchKind,
queue: [Thread,..$num_insts],
sparse: [uint,..$num_insts],
size: uint,
}
impl Threads {
fn new(which: MatchKind) -> Threads {
Threads {
which: which,
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
queue: unsafe { ::std::mem::uninit() },
sparse: unsafe { ::std::mem::uninit() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: uint, groups: &Captures) {
let t = &mut self.queue[self.size];
t.pc = pc;
match self.which {
Exists => {},
Location => {
t.groups[0] = groups[0];
t.groups[1] = groups[1];
}
Submatches => {
for (slot, val) in t.groups.mut_iter().zip(groups.iter()) {
*slot = *val;
}
}
}
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn add_empty(&mut self, pc: uint) {
self.queue[self.size].pc = pc;
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn contains(&self, pc: uint) -> bool {
let s = self.sparse[pc];
s < self.size && self.queue[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: uint) -> uint {
self.queue[i].pc
}
#[inline]
fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures {
&'r mut self.queue[i].groups
}
}
}
::regex::Regex {
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
EmptyBegin(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_begin()
|| self.chars.prev == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_begin())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx,!self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
Match => {
quote_expr!(self.cx, {
match self.which {
Exists => {
return StepMatchEarlyReturn
}
Location => {
groups[0] = caps[0];
groups[1] = caps[1];
return StepMatch
}
Submatches => {
for (slot, val) in groups.mut_iter().zip(caps.iter()) {
*slot = *val;
}
return StepMatch
}
}
})
}
OneChar(c, flags) => {
if flags & FLAG_NOCASE > 0 {
let upc = c.to_uppercase();
quote_expr!(self.cx, {
let upc = self.chars.prev.map(|c| c.to_uppercase());
if upc == Some($upc) {
self.add(nlist, $nextpc, caps);
}
})
} else {
quote_expr!(self.cx, {
if self.chars.prev == Some($c) {
self.add(nlist, $nextpc, caps);
}
})
}
}
CharClass(ref ranges, flags) => {
let negate = flags & FLAG_NEGATED > 0;
let casei = flags & FLAG_NOCASE > 0;
let get_char =
if casei {
quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase())
} else {
quote_expr!(self.cx, self.chars.prev.unwrap())
};
let negcond =
if negate {
quote_expr!(self.cx,!found)
} else {
quote_expr!(self.cx, found)
};
let mranges = self.match_class(casei, ranges.as_slice());
quote_expr!(self.cx, {
if self.chars.prev.is_some() {
let c = $get_char;
let found = $mranges;
if $negcond {
self.add(nlist, $nextpc, caps);
}
}
})
}
Any(flags) => {
if flags & FLAG_DOTNL > 0 {
quote_expr!(self.cx, self.add(nlist, $nextpc, caps))
} else {
quote_expr!(self.cx, {
if self.chars.prev!= Some('\n') {
self.add(nlist, $nextpc, caps)
}
()
})
}
}
// EmptyBegin, EmptyEnd, EmptyWordBoundary, Save, Jump, Split
_ => self.empty_block(),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> @ast::Expr {
let expr_true = quote_expr!(self.cx, true);
let mut arms = ranges.iter().map(|&(mut start, mut end)| {
if casei {
start = start.to_uppercase();
end = end.to_uppercase();
}
let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start),
quote_expr!(self.cx, $end)));
self.cx.arm(self.sp, vec!(pat), expr_true)
}).collect::<Vec<ast::Arm>>();
arms.push(self.wild_arm_expr(quote_expr!(self.cx, false)));
let match_on = quote_expr!(self.cx, c);
self.cx.expr_match(self.sp, match_on, arms)
}
// Generates code for checking a literal prefix of the search string.
// The code is only generated if the regex *has* a literal prefix.
// Otherwise, a no-op is returned.
fn | check_prefix | identifier_name |
|
lib.rs | let num_cap_locs = 2 * self.prog.num_captures();
let num_insts = self.prog.insts.len();
let cap_names = self.vec_expr(self.names.as_slice().iter(),
|cx, name| match *name {
Some(ref name) => {
let name = name.as_slice();
quote_expr!(cx, Some($name.to_owned()))
}
None => cx.expr_none(self.sp),
}
);
let prefix_anchor =
match self.prog.insts.as_slice()[1] {
EmptyBegin(flags) if flags & FLAG_MULTI == 0 => true,
_ => false,
};
let init_groups = self.vec_expr(range(0, num_cap_locs),
|cx, _| cx.expr_none(self.sp));
let prefix_lit = Rc::new(Vec::from_slice(self.prog.prefix.as_slice().as_bytes()));
let prefix_bytes = self.cx.expr_lit(self.sp, ast::LitBinary(prefix_lit));
let check_prefix = self.check_prefix();
let step_insts = self.step_insts();
let add_insts = self.add_insts();
let regex = self.original.as_slice();
quote_expr!(self.cx, {
fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str,
start: uint, end: uint) -> Vec<Option<uint>> {
#![allow(unused_imports)]
use regex::native::{
MatchKind, Exists, Location, Submatches,
StepState, StepMatchEarlyReturn, StepMatch, StepContinue,
CharReader, find_prefix,
};
return Nfa {
which: which,
input: input,
ic: 0,
chars: CharReader::new(input),
}.run(start, end);
type Captures = [Option<uint>,..$num_cap_locs];
struct Nfa<'t> {
which: MatchKind,
input: &'t str,
ic: uint,
chars: CharReader<'t>,
}
impl<'t> Nfa<'t> {
#[allow(unused_variable)]
fn run(&mut self, start: uint, end: uint) -> Vec<Option<uint>> {
let mut matched = false;
let prefix_bytes: &[u8] = &$prefix_bytes;
let mut clist = &mut Threads::new(self.which);
let mut nlist = &mut Threads::new(self.which);
let mut groups = $init_groups;
self.ic = start;
let mut next_ic = self.chars.set(start);
while self.ic <= end {
if clist.size == 0 {
if matched {
break
}
$check_prefix
}
if clist.size == 0 || (!$prefix_anchor &&!matched) {
self.add(clist, 0, &mut groups)
}
self.ic = next_ic;
next_ic = self.chars.advance();
for i in range(0, clist.size) {
let pc = clist.pc(i);
let step_state = self.step(&mut groups, nlist,
clist.groups(i), pc);
match step_state {
StepMatchEarlyReturn =>
return vec![Some(0u), Some(0u)],
StepMatch => { matched = true; break },
StepContinue => {},
}
}
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
match self.which {
Exists if matched => vec![Some(0u), Some(0u)],
Exists => vec![None, None],
Location | Submatches => groups.iter().map(|x| *x).collect(),
}
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variable)]
#[inline]
fn step(&self, groups: &mut Captures, nlist: &mut Threads,
caps: &mut Captures, pc: uint) -> StepState {
$step_insts
StepContinue
}
fn add(&self, nlist: &mut Threads, pc: uint,
groups: &mut Captures) {
if nlist.contains(pc) {
return
}
$add_insts
}
}
struct Thread {
pc: uint,
groups: Captures,
}
struct Threads {
which: MatchKind,
queue: [Thread,..$num_insts],
sparse: [uint,..$num_insts],
size: uint,
}
impl Threads {
fn new(which: MatchKind) -> Threads {
Threads {
which: which,
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
queue: unsafe { ::std::mem::uninit() },
sparse: unsafe { ::std::mem::uninit() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: uint, groups: &Captures) {
let t = &mut self.queue[self.size];
t.pc = pc;
match self.which {
Exists => {},
Location => {
t.groups[0] = groups[0];
t.groups[1] = groups[1];
}
Submatches => {
for (slot, val) in t.groups.mut_iter().zip(groups.iter()) {
*slot = *val;
}
}
}
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn add_empty(&mut self, pc: uint) {
self.queue[self.size].pc = pc;
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn contains(&self, pc: uint) -> bool {
let s = self.sparse[pc];
s < self.size && self.queue[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: uint) -> uint {
self.queue[i].pc
}
#[inline]
fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures {
&'r mut self.queue[i].groups
}
}
}
::regex::Regex {
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
EmptyBegin(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_begin()
|| self.chars.prev == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_begin())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx,!self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr { | let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
Match => {
quote_expr!(self.cx, {
match self.which {
Exists => {
return StepMatchEarlyReturn
}
Location => {
groups[0] = caps[0];
groups[1] = caps[1];
return StepMatch
}
Submatches => {
for (slot, val) in groups.mut_iter().zip(caps.iter()) {
*slot = *val;
}
return StepMatch
}
}
})
}
OneChar(c, flags) => {
if flags & FLAG_NOCASE > 0 {
let upc = c.to_uppercase();
quote_expr!(self.cx, {
let upc = self.chars.prev.map(|c| c.to_uppercase());
if upc == Some($upc) {
self.add(nlist, $nextpc, caps);
}
})
} else {
quote_expr!(self.cx, {
if self.chars.prev == Some($c) {
self.add(nlist, $nextpc, caps);
}
})
}
}
CharClass(ref ranges, flags) => {
let negate = flags & FLAG_NEGATED > 0;
let casei = flags & FLAG_NOCASE > 0;
let get_char =
if casei {
quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase())
} else {
quote_expr!(self.cx, self.chars.prev.unwrap())
};
let negcond =
if negate {
quote_expr!(self.cx,!found)
} else {
| random_line_split |
|
lib.rs | here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
queue: unsafe { ::std::mem::uninit() },
sparse: unsafe { ::std::mem::uninit() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: uint, groups: &Captures) {
let t = &mut self.queue[self.size];
t.pc = pc;
match self.which {
Exists => {},
Location => {
t.groups[0] = groups[0];
t.groups[1] = groups[1];
}
Submatches => {
for (slot, val) in t.groups.mut_iter().zip(groups.iter()) {
*slot = *val;
}
}
}
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn add_empty(&mut self, pc: uint) {
self.queue[self.size].pc = pc;
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn contains(&self, pc: uint) -> bool {
let s = self.sparse[pc];
s < self.size && self.queue[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: uint) -> uint {
self.queue[i].pc
}
#[inline]
fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures {
&'r mut self.queue[i].groups
}
}
}
::regex::Regex {
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
EmptyBegin(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_begin()
|| self.chars.prev == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_begin())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx,!self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
Match => {
quote_expr!(self.cx, {
match self.which {
Exists => {
return StepMatchEarlyReturn
}
Location => {
groups[0] = caps[0];
groups[1] = caps[1];
return StepMatch
}
Submatches => {
for (slot, val) in groups.mut_iter().zip(caps.iter()) {
*slot = *val;
}
return StepMatch
}
}
})
}
OneChar(c, flags) => {
if flags & FLAG_NOCASE > 0 {
let upc = c.to_uppercase();
quote_expr!(self.cx, {
let upc = self.chars.prev.map(|c| c.to_uppercase());
if upc == Some($upc) {
self.add(nlist, $nextpc, caps);
}
})
} else {
quote_expr!(self.cx, {
if self.chars.prev == Some($c) {
self.add(nlist, $nextpc, caps);
}
})
}
}
CharClass(ref ranges, flags) => {
let negate = flags & FLAG_NEGATED > 0;
let casei = flags & FLAG_NOCASE > 0;
let get_char =
if casei {
quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase())
} else {
quote_expr!(self.cx, self.chars.prev.unwrap())
};
let negcond =
if negate {
quote_expr!(self.cx,!found)
} else {
quote_expr!(self.cx, found)
};
let mranges = self.match_class(casei, ranges.as_slice());
quote_expr!(self.cx, {
if self.chars.prev.is_some() {
let c = $get_char;
let found = $mranges;
if $negcond {
self.add(nlist, $nextpc, caps);
}
}
})
}
Any(flags) => {
if flags & FLAG_DOTNL > 0 {
quote_expr!(self.cx, self.add(nlist, $nextpc, caps))
} else {
quote_expr!(self.cx, {
if self.chars.prev!= Some('\n') {
self.add(nlist, $nextpc, caps)
}
()
})
}
}
// EmptyBegin, EmptyEnd, EmptyWordBoundary, Save, Jump, Split
_ => self.empty_block(),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> @ast::Expr {
let expr_true = quote_expr!(self.cx, true);
let mut arms = ranges.iter().map(|&(mut start, mut end)| {
if casei {
start = start.to_uppercase();
end = end.to_uppercase();
}
let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start),
quote_expr!(self.cx, $end)));
self.cx.arm(self.sp, vec!(pat), expr_true)
}).collect::<Vec<ast::Arm>>();
arms.push(self.wild_arm_expr(quote_expr!(self.cx, false)));
let match_on = quote_expr!(self.cx, c);
self.cx.expr_match(self.sp, match_on, arms)
}
// Generates code for checking a literal prefix of the search string.
// The code is only generated if the regex *has* a literal prefix.
// Otherwise, a no-op is returned.
fn check_prefix(&self) -> @ast::Expr {
if self.prog.prefix.len() == 0 {
self.empty_block()
} else {
quote_expr!(self.cx,
if clist.size == 0 {
let haystack = self.input.as_bytes().slice_from(self.ic);
match find_prefix(prefix_bytes, haystack) {
None => break,
Some(i) => {
self.ic += i;
next_ic = self.chars.set(self.ic);
}
}
}
)
}
}
// Builds a `match pc {... }` expression from a list of arms, specifically
// for matching the current program counter with an instruction.
// A wild-card arm is automatically added that executes a no-op. It will
// never be used, but is added to satisfy the compiler complaining about
// non-exhaustive patterns.
fn match_insts(&self, mut arms: Vec<ast::Arm>) -> @ast::Expr {
arms.push(self.wild_arm_expr(self.empty_block()));
self.cx.expr_match(self.sp, quote_expr!(self.cx, pc), arms)
}
fn empty_block(&self) -> @ast::Expr {
quote_expr!(self.cx, {})
}
// Creates a match arm for the instruction at `pc` with the expression
// `body`.
fn arm_inst(&self, pc: uint, body: @ast::Expr) -> ast::Arm {
let pc_pat = self.cx.pat_lit(self.sp, quote_expr!(self.cx, $pc));
self.cx.arm(self.sp, vec!(pc_pat), body)
}
// Creates a wild-card match arm with the expression `body`.
fn wild_arm_expr(&self, body: @ast::Expr) -> ast::Arm {
ast::Arm {
attrs: vec!(),
pats: vec!(@ast::Pat{
id: ast::DUMMY_NODE_ID,
span: self.sp,
node: ast::PatWild,
}),
guard: None,
body: body,
}
}
// Converts `xs` to a `[x1, x2,.., xN]` expression by calling `to_expr`
// on each element in `xs`.
fn vec_expr<T, It: Iterator<T>>(&self, xs: It, to_expr: |&ExtCtxt, T| -> @ast::Expr)
-> @ast::Expr | {
let exprs = xs.map(|x| to_expr(self.cx, x)).collect();
self.cx.expr_vec(self.sp, exprs)
} | identifier_body |
|
cleanup_sst.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::fmt;
use std::sync::Arc;
use kvproto::import_sstpb::SstMeta;
use crate::store::util::is_epoch_stale;
use crate::store::{StoreMsg, StoreRouter};
use engine_traits::KvEngine;
use pd_client::PdClient;
use sst_importer::SSTImporter;
use std::marker::PhantomData;
use tikv_util::worker::Runnable;
pub enum Task {
DeleteSST { ssts: Vec<SstMeta> },
ValidateSST { ssts: Vec<SstMeta> },
}
impl fmt::Display for Task {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Task::DeleteSST { ref ssts } => write!(f, "Delete {} ssts", ssts.len()),
Task::ValidateSST { ref ssts } => write!(f, "Validate {} ssts", ssts.len()),
}
}
}
pub struct Runner<EK, C, S>
where
EK: KvEngine,
S: StoreRouter<EK>,
{
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
_engine: PhantomData<EK>,
}
impl<EK, C, S> Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
pub fn | (
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
) -> Runner<EK, C, S> {
Runner {
store_id,
store_router,
importer,
pd_client,
_engine: PhantomData,
}
}
/// Deletes SST files from the importer.
fn handle_delete_sst(&self, ssts: Vec<SstMeta>) {
for sst in &ssts {
let _ = self.importer.delete(sst);
}
}
/// Validates whether the SST is stale or not.
fn handle_validate_sst(&self, ssts: Vec<SstMeta>) {
let store_id = self.store_id;
let mut invalid_ssts = Vec::new();
for sst in ssts {
match self.pd_client.get_region(sst.get_range().get_start()) {
Ok(r) => {
// The region id may or may not be the same as the
// SST file, but it doesn't matter, because the
// epoch of a range will not decrease anyway.
if is_epoch_stale(r.get_region_epoch(), sst.get_region_epoch()) {
// Region has not been updated.
continue;
}
if r.get_id() == sst.get_region_id()
&& r.get_peers().iter().any(|p| p.get_store_id() == store_id)
{
// The SST still belongs to this store.
continue;
}
invalid_ssts.push(sst);
}
Err(e) => {
error!(%e; "get region failed");
}
}
}
// We need to send back the result to check for the stale
// peer, which may ingest the stale SST before it is
// destroyed.
let msg = StoreMsg::ValidateSSTResult { invalid_ssts };
if let Err(e) = self.store_router.send(msg) {
error!(%e; "send validate sst result failed");
}
}
}
impl<EK, C, S> Runnable for Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
type Task = Task;
fn run(&mut self, task: Task) {
match task {
Task::DeleteSST { ssts } => {
self.handle_delete_sst(ssts);
}
Task::ValidateSST { ssts } => {
self.handle_validate_sst(ssts);
}
}
}
}
| new | identifier_name |
cleanup_sst.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::fmt;
use std::sync::Arc;
use kvproto::import_sstpb::SstMeta;
use crate::store::util::is_epoch_stale;
use crate::store::{StoreMsg, StoreRouter};
use engine_traits::KvEngine;
use pd_client::PdClient;
use sst_importer::SSTImporter;
use std::marker::PhantomData;
use tikv_util::worker::Runnable;
pub enum Task {
DeleteSST { ssts: Vec<SstMeta> },
ValidateSST { ssts: Vec<SstMeta> },
}
impl fmt::Display for Task {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Task::DeleteSST { ref ssts } => write!(f, "Delete {} ssts", ssts.len()),
Task::ValidateSST { ref ssts } => write!(f, "Validate {} ssts", ssts.len()),
}
}
}
pub struct Runner<EK, C, S>
where
EK: KvEngine,
S: StoreRouter<EK>,
{
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
_engine: PhantomData<EK>,
}
impl<EK, C, S> Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
pub fn new(
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
) -> Runner<EK, C, S> {
Runner {
store_id,
store_router,
importer,
pd_client,
_engine: PhantomData,
}
}
/// Deletes SST files from the importer.
fn handle_delete_sst(&self, ssts: Vec<SstMeta>) {
for sst in &ssts {
let _ = self.importer.delete(sst);
}
}
/// Validates whether the SST is stale or not.
fn handle_validate_sst(&self, ssts: Vec<SstMeta>) | }
Err(e) => {
error!(%e; "get region failed");
}
}
}
// We need to send back the result to check for the stale
// peer, which may ingest the stale SST before it is
// destroyed.
let msg = StoreMsg::ValidateSSTResult { invalid_ssts };
if let Err(e) = self.store_router.send(msg) {
error!(%e; "send validate sst result failed");
}
}
}
impl<EK, C, S> Runnable for Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
type Task = Task;
fn run(&mut self, task: Task) {
match task {
Task::DeleteSST { ssts } => {
self.handle_delete_sst(ssts);
}
Task::ValidateSST { ssts } => {
self.handle_validate_sst(ssts);
}
}
}
}
| {
let store_id = self.store_id;
let mut invalid_ssts = Vec::new();
for sst in ssts {
match self.pd_client.get_region(sst.get_range().get_start()) {
Ok(r) => {
// The region id may or may not be the same as the
// SST file, but it doesn't matter, because the
// epoch of a range will not decrease anyway.
if is_epoch_stale(r.get_region_epoch(), sst.get_region_epoch()) {
// Region has not been updated.
continue;
}
if r.get_id() == sst.get_region_id()
&& r.get_peers().iter().any(|p| p.get_store_id() == store_id)
{
// The SST still belongs to this store.
continue;
}
invalid_ssts.push(sst); | identifier_body |
cleanup_sst.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::fmt;
use std::sync::Arc;
use kvproto::import_sstpb::SstMeta;
use crate::store::util::is_epoch_stale;
use crate::store::{StoreMsg, StoreRouter};
use engine_traits::KvEngine;
use pd_client::PdClient;
use sst_importer::SSTImporter;
use std::marker::PhantomData;
use tikv_util::worker::Runnable;
pub enum Task {
DeleteSST { ssts: Vec<SstMeta> },
ValidateSST { ssts: Vec<SstMeta> },
}
impl fmt::Display for Task {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Task::DeleteSST { ref ssts } => write!(f, "Delete {} ssts", ssts.len()),
Task::ValidateSST { ref ssts } => write!(f, "Validate {} ssts", ssts.len()),
}
}
}
pub struct Runner<EK, C, S>
where
EK: KvEngine,
S: StoreRouter<EK>,
{
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
_engine: PhantomData<EK>,
}
impl<EK, C, S> Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
pub fn new(
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
) -> Runner<EK, C, S> {
Runner {
store_id,
store_router,
importer,
pd_client,
_engine: PhantomData,
}
}
/// Deletes SST files from the importer.
fn handle_delete_sst(&self, ssts: Vec<SstMeta>) {
for sst in &ssts {
let _ = self.importer.delete(sst);
}
}
/// Validates whether the SST is stale or not. | let mut invalid_ssts = Vec::new();
for sst in ssts {
match self.pd_client.get_region(sst.get_range().get_start()) {
Ok(r) => {
// The region id may or may not be the same as the
// SST file, but it doesn't matter, because the
// epoch of a range will not decrease anyway.
if is_epoch_stale(r.get_region_epoch(), sst.get_region_epoch()) {
// Region has not been updated.
continue;
}
if r.get_id() == sst.get_region_id()
&& r.get_peers().iter().any(|p| p.get_store_id() == store_id)
{
// The SST still belongs to this store.
continue;
}
invalid_ssts.push(sst);
}
Err(e) => {
error!(%e; "get region failed");
}
}
}
// We need to send back the result to check for the stale
// peer, which may ingest the stale SST before it is
// destroyed.
let msg = StoreMsg::ValidateSSTResult { invalid_ssts };
if let Err(e) = self.store_router.send(msg) {
error!(%e; "send validate sst result failed");
}
}
}
impl<EK, C, S> Runnable for Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
type Task = Task;
fn run(&mut self, task: Task) {
match task {
Task::DeleteSST { ssts } => {
self.handle_delete_sst(ssts);
}
Task::ValidateSST { ssts } => {
self.handle_validate_sst(ssts);
}
}
}
} | fn handle_validate_sst(&self, ssts: Vec<SstMeta>) {
let store_id = self.store_id; | random_line_split |
cleanup_sst.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::fmt;
use std::sync::Arc;
use kvproto::import_sstpb::SstMeta;
use crate::store::util::is_epoch_stale;
use crate::store::{StoreMsg, StoreRouter};
use engine_traits::KvEngine;
use pd_client::PdClient;
use sst_importer::SSTImporter;
use std::marker::PhantomData;
use tikv_util::worker::Runnable;
pub enum Task {
DeleteSST { ssts: Vec<SstMeta> },
ValidateSST { ssts: Vec<SstMeta> },
}
impl fmt::Display for Task {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Task::DeleteSST { ref ssts } => write!(f, "Delete {} ssts", ssts.len()),
Task::ValidateSST { ref ssts } => write!(f, "Validate {} ssts", ssts.len()),
}
}
}
pub struct Runner<EK, C, S>
where
EK: KvEngine,
S: StoreRouter<EK>,
{
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
_engine: PhantomData<EK>,
}
impl<EK, C, S> Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
pub fn new(
store_id: u64,
store_router: S,
importer: Arc<SSTImporter>,
pd_client: Arc<C>,
) -> Runner<EK, C, S> {
Runner {
store_id,
store_router,
importer,
pd_client,
_engine: PhantomData,
}
}
/// Deletes SST files from the importer.
fn handle_delete_sst(&self, ssts: Vec<SstMeta>) {
for sst in &ssts {
let _ = self.importer.delete(sst);
}
}
/// Validates whether the SST is stale or not.
fn handle_validate_sst(&self, ssts: Vec<SstMeta>) {
let store_id = self.store_id;
let mut invalid_ssts = Vec::new();
for sst in ssts {
match self.pd_client.get_region(sst.get_range().get_start()) {
Ok(r) => {
// The region id may or may not be the same as the
// SST file, but it doesn't matter, because the
// epoch of a range will not decrease anyway.
if is_epoch_stale(r.get_region_epoch(), sst.get_region_epoch()) {
// Region has not been updated.
continue;
}
if r.get_id() == sst.get_region_id()
&& r.get_peers().iter().any(|p| p.get_store_id() == store_id)
{
// The SST still belongs to this store.
continue;
}
invalid_ssts.push(sst);
}
Err(e) => |
}
}
// We need to send back the result to check for the stale
// peer, which may ingest the stale SST before it is
// destroyed.
let msg = StoreMsg::ValidateSSTResult { invalid_ssts };
if let Err(e) = self.store_router.send(msg) {
error!(%e; "send validate sst result failed");
}
}
}
impl<EK, C, S> Runnable for Runner<EK, C, S>
where
EK: KvEngine,
C: PdClient,
S: StoreRouter<EK>,
{
type Task = Task;
fn run(&mut self, task: Task) {
match task {
Task::DeleteSST { ssts } => {
self.handle_delete_sst(ssts);
}
Task::ValidateSST { ssts } => {
self.handle_validate_sst(ssts);
}
}
}
}
| {
error!(%e; "get region failed");
} | conditional_block |
resources.rs | use serde::de::Deserialize;
use serde_json;
use std::{collections::HashMap, str};
use economy::Commodity;
use entities::Faction;
use entities::PlanetEconomy;
/// Generic Resource trait to be implemented by all resource types which should
/// be loaded at compile time.
/// KEY must be unique to the specific resource (e.g the filename of the
/// resource).
pub trait Resource: Deserialize<'static> {
const KEY: &'static str;
}
lazy_static! {
// Load resources at compile time.
// TODO: Convert to resource at compile time to save resources.
static ref RESOURCES: HashMap<&'static str, &'static str> = {
let mut res = HashMap::new();
res.insert(
AstronomicalNamesResource::KEY,
include_str!("../res/astronomical_names.json"),
);
res.insert(
AgentResource::KEY,
include_str!("../res/economic_agents.json"),
);
res
};
}
/// Attempts to returns the resource with the given type, will return None
/// if the type has no resource or if the deserialization fails.
pub fn fetch_resource<T: Resource>() -> Option<T> {
let res_str = RESOURCES.get(T::KEY).unwrap();
match serde_json::from_str(res_str) {
Ok(res) => Some(res),
Err(msg) => {
error!("{}", msg);
None
}
}
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
/// Resource used in name generation of celestial objects.
pub struct | {
pub names: Vec<String>,
pub scientific_names: Vec<String>,
pub greek: Vec<String>,
pub roman: Vec<String>,
pub decorators: Vec<String>,
}
impl Resource for AstronomicalNamesResource {
const KEY: &'static str = "astronomical_names";
}
#[derive(Serialize, Deserialize, Debug)]
/// Resource containing all production/consumptions for factions and planets.
pub struct AgentResource {
pub faction_ideals: HashMap<Faction, HashMap<Commodity, u64>>,
pub faction_production: HashMap<Faction, HashMap<Commodity, u64>>,
pub planet_ideals: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
pub planet_production: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
}
impl Resource for AgentResource {
const KEY: &'static str = "economic_agents";
}
| AstronomicalNamesResource | identifier_name |
resources.rs | use serde::de::Deserialize;
use serde_json;
use std::{collections::HashMap, str};
use economy::Commodity;
use entities::Faction;
use entities::PlanetEconomy;
/// Generic Resource trait to be implemented by all resource types which should
/// be loaded at compile time.
/// KEY must be unique to the specific resource (e.g the filename of the
/// resource).
pub trait Resource: Deserialize<'static> {
const KEY: &'static str;
}
lazy_static! {
// Load resources at compile time.
// TODO: Convert to resource at compile time to save resources.
static ref RESOURCES: HashMap<&'static str, &'static str> = {
let mut res = HashMap::new();
res.insert(
AstronomicalNamesResource::KEY,
include_str!("../res/astronomical_names.json"),
);
res.insert(
AgentResource::KEY,
include_str!("../res/economic_agents.json"),
);
res
};
}
/// Attempts to returns the resource with the given type, will return None | /// if the type has no resource or if the deserialization fails.
pub fn fetch_resource<T: Resource>() -> Option<T> {
let res_str = RESOURCES.get(T::KEY).unwrap();
match serde_json::from_str(res_str) {
Ok(res) => Some(res),
Err(msg) => {
error!("{}", msg);
None
}
}
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
/// Resource used in name generation of celestial objects.
pub struct AstronomicalNamesResource {
pub names: Vec<String>,
pub scientific_names: Vec<String>,
pub greek: Vec<String>,
pub roman: Vec<String>,
pub decorators: Vec<String>,
}
impl Resource for AstronomicalNamesResource {
const KEY: &'static str = "astronomical_names";
}
#[derive(Serialize, Deserialize, Debug)]
/// Resource containing all production/consumptions for factions and planets.
pub struct AgentResource {
pub faction_ideals: HashMap<Faction, HashMap<Commodity, u64>>,
pub faction_production: HashMap<Faction, HashMap<Commodity, u64>>,
pub planet_ideals: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
pub planet_production: HashMap<PlanetEconomy, HashMap<Commodity, u64>>,
}
impl Resource for AgentResource {
const KEY: &'static str = "economic_agents";
} | random_line_split |
|
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct | {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>,
last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>,
}
impl grouper::HasIndex for Previous {
fn index(&self) -> usize {
self.id
}
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
}
| Previous | identifier_name |
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Previous {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>,
last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>,
}
impl grouper::HasIndex for Previous {
fn index(&self) -> usize |
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
}
| {
self.id
} | identifier_body |
previous.rs | use std::path::Path;
use serde::{
Deserialize,
Serialize,
};
use anyhow::Result;
use rnc_core::grouper;
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Previous {
pub id: usize,
pub urs_id: usize,
pub urs_taxid: String,
upi: String,
taxid: usize,
databases: Option<String>,
description: Option<String>,
has_coordinates: Option<bool>,
is_active: Option<bool>, | }
impl grouper::HasIndex for Previous {
fn index(&self) -> usize {
self.id
}
}
pub fn group(path: &Path, max: usize, output: &Path) -> Result<()> {
grouper::group::<Previous>(grouper::Criteria::ZeroOrOne, &path, 1, max, &output)
} | last_release: Option<usize>,
rna_type: Option<String>,
short_description: Option<String>,
so_rna_type: Option<String>, | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "devtools_traits"]
#![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(heapsize_plugin, serde_macros)]
#[macro_use]
extern crate bitflags;
extern crate heapsize;
extern crate hyper;
extern crate ipc_channel;
extern crate msg;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information would be attached to NewGlobal to be received and show in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
#[derive(Deserialize, HeapSizeOf, Serialize, Clone)]
pub struct CSSError {
pub filename: String,
pub line: usize,
pub column: usize,
pub msg: String
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from threads in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script threads
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script thread.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
/// Report a CSS parse error for the given pipeline
ReportCSSError(PipelineId, CSSError),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub display: String,
pub position: String,
pub zIndex: String,
pub boxSizing: String,
pub autoMargins: AutoMargins,
pub marginTop: String,
pub marginRight: String,
pub marginBottom: String,
pub marginLeft: String,
pub borderTopWidth: String,
pub borderRightWidth: String,
pub borderBottomWidth: String,
pub borderLeftWidth: String,
pub paddingTop: String,
pub paddingRight: String,
pub paddingBottom: String,
pub paddingLeft: String,
pub width: f32,
pub height: f32,
}
#[derive(Deserialize, Serialize)]
pub struct AutoMargins {
pub top: bool,
pub right: bool,
pub bottom: bool,
pub left: bool,
}
/// Messages to process in a particular script thread, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (ie. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: usize,
pub columnNumber: usize,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(Deserialize, Serialize)]
pub struct PageError {
#[serde(rename = "type")]
pub type_: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(Deserialize, Serialize)]
pub struct ConsoleAPI {
#[serde(rename = "type")]
pub type_: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum | {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct HttpResponse {
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start_time,
start_stack: self.start_stack,
end_time: PreciseTime::now(),
end_stack: None,
}
}
}
/// A replacement for `time::PreciseTime` that isn't opaque, so we can serialize it.
///
/// The reason why this doesn't go upstream is that `time` is slated to be part of Rust's standard
/// library, which definitely can't have any dependencies on `serde`. But `serde` can't implement
/// `Deserialize` and `Serialize` itself, because `time::PreciseTime` is opaque! A Catch-22. So I'm
/// duplicating the definition here.
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct PreciseTime(u64);
impl PreciseTime {
pub fn now() -> PreciseTime {
PreciseTime(time::precise_time_ns())
}
pub fn to(&self, later: PreciseTime) -> Duration {
Duration::nanoseconds((later.0 - self.0) as i64)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32);
| CachedConsoleMessage | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "devtools_traits"]
#![crate_type = "rlib"]
#![allow(non_snake_case)]
#![deny(unsafe_code)]
#![feature(custom_derive, plugin)]
#![plugin(heapsize_plugin, serde_macros)]
| extern crate ipc_channel;
extern crate msg;
extern crate serde;
extern crate time;
extern crate url;
extern crate util;
use hyper::header::Headers;
use hyper::http::RawStatus;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use std::net::TcpStream;
use time::Duration;
use time::Tm;
use url::Url;
// Information would be attached to NewGlobal to be received and show in devtools.
// Extend these fields if we need more information.
#[derive(Deserialize, Serialize)]
pub struct DevtoolsPageInfo {
pub title: String,
pub url: Url
}
#[derive(Deserialize, HeapSizeOf, Serialize, Clone)]
pub struct CSSError {
pub filename: String,
pub line: usize,
pub column: usize,
pub msg: String
}
/// Messages to instruct the devtools server to update its known actors/state
/// according to changes in the browser.
pub enum DevtoolsControlMsg {
/// Messages from threads in the chrome process (resource/constellation/devtools)
FromChrome(ChromeToDevtoolsControlMsg),
/// Messages from script threads
FromScript(ScriptToDevtoolsControlMsg),
}
/// Events that the devtools server must act upon.
pub enum ChromeToDevtoolsControlMsg {
/// A new client has connected to the server.
AddClient(TcpStream),
/// The browser is shutting down.
ServerExitMsg,
/// A network event occurred (request, reply, etc.). The actor with the
/// provided name should be notified.
NetworkEvent(String, NetworkEvent),
}
#[derive(Deserialize, Serialize)]
/// Events that the devtools server must act upon.
pub enum ScriptToDevtoolsControlMsg {
/// A new global object was created, associated with a particular pipeline.
/// The means of communicating directly with it are provided.
NewGlobal((PipelineId, Option<WorkerId>),
IpcSender<DevtoolScriptControlMsg>,
DevtoolsPageInfo),
/// A particular page has invoked the console API.
ConsoleAPI(PipelineId, ConsoleMessage, Option<WorkerId>),
/// An animation frame with the given timestamp was processed in a script thread.
/// The actor with the provided name should be notified.
FramerateTick(String, f64),
/// Report a CSS parse error for the given pipeline
ReportCSSError(PipelineId, CSSError),
}
/// Serialized JS return values
/// TODO: generalize this beyond the EvaluateJS message?
#[derive(Deserialize, Serialize)]
pub enum EvaluateJSReply {
VoidValue,
NullValue,
BooleanValue(bool),
NumberValue(f64),
StringValue(String),
ActorValue { class: String, uuid: String },
}
#[derive(Deserialize, Serialize)]
pub struct AttrInfo {
pub namespace: String,
pub name: String,
pub value: String,
}
#[derive(Deserialize, Serialize)]
pub struct NodeInfo {
pub uniqueId: String,
pub baseURI: String,
pub parent: String,
pub nodeType: u16,
pub namespaceURI: String,
pub nodeName: String,
pub numChildren: usize,
pub name: String,
pub publicId: String,
pub systemId: String,
pub attrs: Vec<AttrInfo>,
pub isDocumentElement: bool,
pub shortValue: String,
pub incompleteValue: bool,
}
pub struct StartedTimelineMarker {
name: String,
start_time: PreciseTime,
start_stack: Option<Vec<()>>,
}
#[derive(Deserialize, Serialize)]
pub struct TimelineMarker {
pub name: String,
pub start_time: PreciseTime,
pub start_stack: Option<Vec<()>>,
pub end_time: PreciseTime,
pub end_stack: Option<Vec<()>>,
}
#[derive(PartialEq, Eq, Hash, Clone, Deserialize, Serialize)]
pub enum TimelineMarkerType {
Reflow,
DOMEvent,
}
/// The properties of a DOM node as computed by layout.
#[derive(Deserialize, Serialize)]
pub struct ComputedNodeLayout {
pub display: String,
pub position: String,
pub zIndex: String,
pub boxSizing: String,
pub autoMargins: AutoMargins,
pub marginTop: String,
pub marginRight: String,
pub marginBottom: String,
pub marginLeft: String,
pub borderTopWidth: String,
pub borderRightWidth: String,
pub borderBottomWidth: String,
pub borderLeftWidth: String,
pub paddingTop: String,
pub paddingRight: String,
pub paddingBottom: String,
pub paddingLeft: String,
pub width: f32,
pub height: f32,
}
#[derive(Deserialize, Serialize)]
pub struct AutoMargins {
pub top: bool,
pub right: bool,
pub bottom: bool,
pub left: bool,
}
/// Messages to process in a particular script thread, as instructed by a devtools client.
#[derive(Deserialize, Serialize)]
pub enum DevtoolScriptControlMsg {
/// Evaluate a JS snippet in the context of the global for the given pipeline.
EvaluateJS(PipelineId, String, IpcSender<EvaluateJSReply>),
/// Retrieve the details of the root node (ie. the document) for the given pipeline.
GetRootNode(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the document element for the given pipeline.
GetDocumentElement(PipelineId, IpcSender<NodeInfo>),
/// Retrieve the details of the child nodes of the given node in the given pipeline.
GetChildren(PipelineId, String, IpcSender<Vec<NodeInfo>>),
/// Retrieve the computed layout properties of the given node in the given pipeline.
GetLayout(PipelineId, String, IpcSender<ComputedNodeLayout>),
/// Retrieve all stored console messages for the given pipeline.
GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender<Vec<CachedConsoleMessage>>),
/// Update a given node's attributes with a list of modifications.
ModifyAttribute(PipelineId, String, Vec<Modification>),
/// Request live console messages for a given pipeline (true if desired, false otherwise).
WantsLiveNotifications(PipelineId, bool),
/// Request live notifications for a given set of timeline events for a given pipeline.
SetTimelineMarkers(PipelineId, Vec<TimelineMarkerType>, IpcSender<TimelineMarker>),
/// Withdraw request for live timeline notifications for a given pipeline.
DropTimelineMarkers(PipelineId, Vec<TimelineMarkerType>),
/// Request a callback directed at the given actor name from the next animation frame
/// executed in the given pipeline.
RequestAnimationFrame(PipelineId, String),
}
#[derive(Deserialize, Serialize)]
pub struct Modification {
pub attributeName: String,
pub newValue: Option<String>,
}
#[derive(Clone, Deserialize, Serialize)]
pub enum LogLevel {
Log,
Debug,
Info,
Warn,
Error,
}
#[derive(Clone, Deserialize, Serialize)]
pub struct ConsoleMessage {
pub message: String,
pub logLevel: LogLevel,
pub filename: String,
pub lineNumber: usize,
pub columnNumber: usize,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags CachedConsoleMessageTypes: u8 {
const PAGE_ERROR = 1 << 0,
const CONSOLE_API = 1 << 1,
}
}
#[derive(Deserialize, Serialize)]
pub struct PageError {
#[serde(rename = "type")]
pub type_: String,
pub errorMessage: String,
pub sourceName: String,
pub lineText: String,
pub lineNumber: u32,
pub columnNumber: u32,
pub category: String,
pub timeStamp: u64,
pub error: bool,
pub warning: bool,
pub exception: bool,
pub strict: bool,
pub private: bool,
}
#[derive(Deserialize, Serialize)]
pub struct ConsoleAPI {
#[serde(rename = "type")]
pub type_: String,
pub level: String,
pub filename: String,
pub lineNumber: u32,
pub functionName: String,
pub timeStamp: u64,
pub private: bool,
pub arguments: Vec<String>,
}
#[derive(Deserialize, Serialize)]
pub enum CachedConsoleMessage {
PageError(PageError),
ConsoleAPI(ConsoleAPI),
}
#[derive(Debug, PartialEq)]
pub struct HttpRequest {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
pub startedDateTime: Tm
}
#[derive(Debug, PartialEq)]
pub struct HttpResponse {
pub headers: Option<Headers>,
pub status: Option<RawStatus>,
pub body: Option<Vec<u8>>,
pub pipeline_id: PipelineId,
}
pub enum NetworkEvent {
HttpRequest(HttpRequest),
HttpResponse(HttpResponse),
}
impl TimelineMarker {
pub fn start(name: String) -> StartedTimelineMarker {
StartedTimelineMarker {
name: name,
start_time: PreciseTime::now(),
start_stack: None,
}
}
}
impl StartedTimelineMarker {
pub fn end(self) -> TimelineMarker {
TimelineMarker {
name: self.name,
start_time: self.start_time,
start_stack: self.start_stack,
end_time: PreciseTime::now(),
end_stack: None,
}
}
}
/// A replacement for `time::PreciseTime` that isn't opaque, so we can serialize it.
///
/// The reason why this doesn't go upstream is that `time` is slated to be part of Rust's standard
/// library, which definitely can't have any dependencies on `serde`. But `serde` can't implement
/// `Deserialize` and `Serialize` itself, because `time::PreciseTime` is opaque! A Catch-22. So I'm
/// duplicating the definition here.
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct PreciseTime(u64);
impl PreciseTime {
pub fn now() -> PreciseTime {
PreciseTime(time::precise_time_ns())
}
pub fn to(&self, later: PreciseTime) -> Duration {
Duration::nanoseconds((later.0 - self.0) as i64)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32); | #[macro_use]
extern crate bitflags;
extern crate heapsize;
extern crate hyper; | random_line_split |
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn | () {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
}
}
| zero_test1 | identifier_name |
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 } | // impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn zero_test1() {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
}
} | // } | random_line_split |
zero.rs | #![feature(core, zero_one)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Zero;
// pub trait Zero {
// /// The "zero" (usually, additive identity) for this type.
// fn zero() -> Self;
// }
// pub trait One {
// /// The "one" (usually, multiplicative identity) for this type.
// fn one() -> Self;
// }
// macro_rules! zero_one_impl {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1 }
// }
// )*)
// }
// zero_one_impl! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }
// macro_rules! zero_one_impl_float {
// ($($t:ty)*) => ($(
// impl Zero for $t {
// #[inline]
// fn zero() -> Self { 0.0 }
// }
// impl One for $t {
// #[inline]
// fn one() -> Self { 1.0 }
// }
// )*)
// }
// zero_one_impl_float! { f32 f64 }
type T = u64;
#[test]
fn zero_test1() |
}
| {
let value: T = T::zero();
assert_eq!(value, 0x0000000000000000);
} | identifier_body |
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> |
}
pub struct EntryMeta {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
}
| {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
} | identifier_body |
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta | .remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
}
}
pub struct EntryMeta {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
} | random_line_split |
|
meta.rs | // Copyright (C) 2015 Steven Allen
//
// This file is part of gazetta.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the
// GNU General Public License as published by the Free Software Foundation version 3 of the
// License.
//
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
// the GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along with this program. If
// not, see <http://www.gnu.org/licenses/>.
//
use gazetta_core::model::Meta;
use gazetta_core::yaml::Hash;
use crate::link::Link;
use crate::person::Person;
use crate::util::BubbleResult;
use crate::yaml::*;
pub struct SourceMeta {
pub nav: Vec<Link>,
pub author: Person,
}
impl Meta for SourceMeta {
fn from_yaml(mut meta: Hash) -> Result<SourceMeta, &'static str> {
Ok(SourceMeta {
nav: meta
.remove(&NAV)
.map(Link::many_from_yaml)
.bubble_result()?
.unwrap_or_else(Vec::new),
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?
.ok_or("websites must have authors")?,
})
}
}
pub struct | {
pub author: Option<Person>,
pub about: Option<Person>,
}
impl Meta for EntryMeta {
fn from_yaml(mut meta: Hash) -> Result<EntryMeta, &'static str> {
Ok(EntryMeta {
author: meta
.remove(&AUTHOR)
.map(Person::from_yaml)
.bubble_result()?,
about: meta.remove(&ABOUT).map(Person::from_yaml).bubble_result()?,
})
}
}
| EntryMeta | identifier_name |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
/// Entry point: reads the minimum present count from the command line and
/// runs both puzzle parts against it.
fn main() {
    let cli = App::new("day20")
        .version("v1.0")
        .author("Andrew Rink <[email protected]>")
        .args_from_usage("<NUM> 'Minimum present number'")
        .get_matches();
    // <NUM> is a required positional argument, so `value_of` cannot be None;
    // a non-numeric value still panics on parse, exactly as before.
    let threshold: usize = cli.value_of("NUM").unwrap().parse().unwrap();
    find_house_number(threshold);
    find_house_number_part2(threshold);
}
/// AoC 2015 day 20, part 2: elf `i` delivers `11 * i` presents, but only to
/// its first 50 multiples. Prints and returns the lowest house number that
/// receives at least `presents` presents, or `None` if no house in the
/// searched range qualifies (e.g. `presents == 0`).
///
/// Previously this only printed the answer; returning it as well keeps the
/// output identical while making the result usable and testable by callers.
fn find_house_number_part2(presents: usize) -> Option<usize> {
    // Searching houses 1..=presents/10 is sufficient: house h receives at
    // least 11*h presents from elf h alone.
    let size = presents / 10;
    let mut delivered = vec![0usize; size];
    for elf in 1..=size {
        // Each elf visits multiples of its number, stopping after 50 houses.
        for house in (elf..=size).step_by(elf).take(50) {
            delivered[house - 1] += elf * 11;
        }
    }
    let winner = delivered.iter().position(|&total| total >= presents);
    if let Some(idx) = winner {
        println!("Part 2: House {} received {} presents", idx + 1, delivered[idx]);
    }
    winner.map(|idx| idx + 1)
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i; |
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
} | }
} | random_line_split |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn find_house_number_part2(presents : usize) | break;
}
}
}
/// AoC 2015 day 20, part 1: elf `i` delivers `10 * i` presents to every
/// multiple of `i`. Prints and returns the lowest house number that receives
/// at least `presents` presents, or `None` if no house in the searched range
/// qualifies (e.g. `presents == 0`).
///
/// Previously this only printed the answer; returning it as well keeps the
/// output identical while making the result usable and testable by callers.
fn find_house_number(presents: usize) -> Option<usize> {
    // House h receives 10*h presents from elf h alone, so searching houses
    // 1..=presents/10 is sufficient.
    let target = presents / 10;
    let mut delivered = vec![0usize; target];
    for elf in 1..=target {
        for house in (elf..=target).step_by(elf) {
            delivered[house - 1] += elf * 10;
        }
    }
    let winner = delivered.iter().position(|&total| total >= presents);
    if let Some(idx) = winner {
        println!("Part 1: House {} received {} presents", idx + 1, delivered[idx]);
    }
    winner.map(|idx| idx + 1)
}
| {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 2: House {} received {} presents", i+1, e); | identifier_body |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn | (presents : usize) {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 2: House {} received {} presents", i+1, e);
break;
}
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
}
| find_house_number_part2 | identifier_name |
day20.rs | extern crate clap;
extern crate regex;
use clap::App;
fn main() {
let matches = App::new("day20")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<NUM> 'Minimum present number'")
.get_matches();
let num = matches.value_of("NUM").unwrap().parse::<usize>().unwrap();
find_house_number(num);
find_house_number_part2(num);
}
fn find_house_number_part2(presents : usize) {
let sz = presents/10;
let mut v = vec![0; sz];
for i in 1..sz+1 {
let mut j = i;
let mut cnt = 0;
while j <= sz && cnt < 50 {
let entry = v.get_mut(j-1).unwrap();
*entry += i*11;
j += i;
cnt += 1;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents |
}
}
fn find_house_number(presents : usize) {
let target = presents / 10;
let mut v = vec![0; target];
for i in 1..target+1 {
let mut j = i;
while j <= target {
let entry = v.get_mut(j-1).unwrap();
*entry += i*10;
j += i;
}
}
for (i, e) in v.iter().enumerate() {
if *e >= presents {
println!("Part 1: House {} received {} presents", i+1, e);
break;
}
}
}
| {
println!("Part 2: House {} received {} presents", i+1, e);
break;
} | conditional_block |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
fn | (input: &str) -> Result<u32, std::num::ParseIntError> {
input.parse::<u32>()
}
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() {
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
}
}
}
| parse_len | identifier_name |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
/// Parse the ASCII decimal length prefix of a message into a `u32`.
fn parse_len(input: &str) -> Result<u32, std::num::ParseIntError> {
input.parse::<u32>()
}
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() | }
}
| {
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
} | identifier_body |
parser.rs | /* Copyright (C) 2018 Open Information Security Foundation
* | * the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use std;
/// Parse the ASCII decimal length prefix of a message into a `u32`.
fn parse_len(input: &str) -> Result<u32, std::num::ParseIntError> {
    str::parse::<u32>(input)
}
// nom parser for one length-prefixed message of the form "<len>:<payload>"
// (e.g. "5:hello"): read the decimal length up to ':', skip the separator,
// then take exactly `len` bytes of payload and return them as a String.
// Trailing bytes are left unconsumed for the next call.
named!(pub parse_message<String>,
do_parse!(
len: map_res!(
map_res!(take_until!(":"), std::str::from_utf8), parse_len) >>
_sep: take!(1) >>
msg: take_str!(len) >>
(
msg.to_string()
)
));
#[cfg(test)]
mod tests {
use nom::*;
use super::*;
/// Simple test of some valid data.
#[test]
fn test_parse_valid() {
// Two back-to-back messages; the parser should consume only the first,
// leaving the 6-byte remainder "4:Bye." unparsed.
let buf = b"12:Hello World!4:Bye.";
let result = parse_message(buf);
match result {
Ok((remainder, message)) => {
// Check the first message.
assert_eq!(message, "Hello World!");
// And we should have 6 bytes left.
assert_eq!(remainder.len(), 6);
}
Err(Err::Incomplete(_)) => {
panic!("Result should not have been incomplete.");
}
Err(Err::Error(err)) |
Err(Err::Failure(err)) => {
panic!("Result should not be an error: {:?}.", err);
}
}
}
}
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
// First ask the proxy's own handler for an own-property descriptor.
let handler = GetProxyHandler(proxy);
if!InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
// A non-null `obj` means the property was found on the proxy itself.
if!(*desc).obj.is_null() {
return true;
}
// Not an own property: fall back to a lookup along the prototype chain.
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
// No prototype: report "not found" via a null descriptor object.
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc)!= 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER)!= 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY)!= 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs)!= 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
// With no expando object there are no expando properties, so deletion
// trivially succeeds (`*bp = true` reports the delete as successful).
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
// Lazily create the expando: reuse the one stored in the proxy's
// JSPROXYSLOT_EXPANDO slot if it already exists.
let mut expando = get_expando_object(obj);
if expando.is_null() {
// Fresh plain object (null class and prototype), parented to the
// proxy's own parent so it lives in the same scope/compartment.
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
// Cache it back into the proxy's extra slot for future lookups.
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and mark the property
/// enumerable; it is additionally marked read-only when `readonly` is true.
/// (The previous comment claimed "writable if `readonly` is true", which
/// contradicted the code: `readonly == true` sets JSPROP_READONLY.)
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
// No accessors: this describes a plain value property.
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true | }
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
} | random_line_split |
|
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if!InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if!(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc)!= 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER)!= 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY)!= 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs)!= 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn | (cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| delete | identifier_name |
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if!InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if!(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc)!= 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER)!= 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY)!= 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs)!= 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject |
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
} | identifier_body |
proxyhandler.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::jsapi::{JSContext, jsid, JSPropertyDescriptor, JSObject, JSString};
use js::jsapi::{JS_GetPropertyDescriptorById, JS_NewStringCopyN};
use js::jsapi::{JS_DefinePropertyById, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_ReportErrorFlagsAndNumber, JS_StrictPropertyStub};
use js::jsapi::{JSREPORT_WARNING, JSREPORT_STRICT, JSREPORT_STRICT_MODE_ERROR};
use js::jsval::ObjectValue;
use js::glue::GetProxyExtra;
use js::glue::{GetObjectProto, GetObjectParent, SetProxyExtra, GetProxyHandler};
use js::glue::InvokeGetOwnPropertyDescriptor;
use js::glue::RUST_js_GetErrorMessage;
use js::glue::AutoIdVector;
use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
use libc;
use std::mem;
use std::ptr;
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: *mut JSObject,
id: jsid, set: bool,
desc: *mut JSPropertyDescriptor)
-> bool {
let handler = GetProxyHandler(proxy);
if!InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, set, desc) {
return false;
}
if!(*desc).obj.is_null() {
return true;
}
//let proto = JS_GetPrototype(proxy);
let proto = GetObjectProto(proxy);
if proto.is_null() {
(*desc).obj = ptr::null_mut();
return true;
}
JS_GetPropertyDescriptorById(cx, proto, id, JSRESOLVE_QUALIFIED, desc)!= 0
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, desc: *mut JSPropertyDescriptor)
-> bool {
static JSMSG_GETTER_ONLY: libc::c_uint = 160;
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute((*desc).setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if ((*desc).attrs & JSPROP_GETTER)!= 0 && setter == setter_stub {
return JS_ReportErrorFlagsAndNumber(cx,
JSREPORT_WARNING | JSREPORT_STRICT |
JSREPORT_STRICT_MODE_ERROR,
Some(RUST_js_GetErrorMessage), ptr::null_mut(),
JSMSG_GETTER_ONLY)!= 0;
}
let expando = ensure_expando_object(cx, proxy);
return JS_DefinePropertyById(cx, expando, id, (*desc).value, (*desc).getter,
(*desc).setter, (*desc).attrs)!= 0;
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
bp: *mut bool) -> bool {
let expando = get_expando_object(proxy);
if expando.is_null() {
*bp = true;
return true;
}
return delete_property_by_id(cx, expando, id, &mut *bp);
}
/// Returns the stringification of an object with class `name`.
pub fn object_to_string(cx: *mut JSContext, name: &str) -> *mut JSString {
unsafe {
let result = format!("[object {}]", name);
let chars = result.as_ptr() as *const libc::c_char;
let length = result.len() as libc::size_t;
let string = JS_NewStringCopyN(cx, chars, length);
assert!(!string.is_null());
return string;
}
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: *mut JSObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let val = GetProxyExtra(obj, JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: *mut JSObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(),
ptr::null_mut(),
GetObjectParent(obj));
assert!(!expando.is_null());
SetProxyExtra(obj, JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
return expando;
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly | else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
desc.shortid = 0;
}
/// No-op required hook.
pub unsafe extern fn get_own_property_names(_cx: *mut JSContext,
_obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
/// No-op required hook.
pub unsafe extern fn enumerate(_cx: *mut JSContext, _obj: *mut JSObject,
_v: *mut AutoIdVector) -> bool {
true
}
| { JSPROP_READONLY } | conditional_block |
sign.rs | // Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
    /// Create a verifier for the given raw Ed25519 public key bytes.
    pub fn new(pubkey: &[u8]) -> Self {
        Verifier {
            pubkey: Vec::from(pubkey),
            buf: Vec::with_capacity(INITIAL_BUF_SIZE),
        }
    }

    /// Append `data` to the message being verified.
    pub fn update(&mut self, data: &[u8]) {
        // extend_from_slice grows the buffer as needed; the previous manual
        // `reserve` call was redundant.
        self.buf.extend_from_slice(data);
    }

    /// Check `expected_sig` against all data fed to `update` so far.
    /// Returns true only if the signature verifies for the buffered message.
    pub fn verify(&self, expected_sig: &[u8]) -> bool {
        let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
        // ring returns Result<(), Unspecified>; success/failure is all we need.
        pk.verify(&self.buf, expected_sig).is_ok()
    }
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self |
}
impl Signer {
/// Create a signer with a fresh random 32-byte Ed25519 seed.
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
/// Create a signer from an existing seed.
/// Panics if `seed` is not accepted as an Ed25519 seed by ring.
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
/// Append `data` to the message to be signed.
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
/// Sign everything fed to `update` since the last `sign` call,
/// then clear the internal buffer for the next message.
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
/// Raw bytes of the Ed25519 public key.
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)]
mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() {
let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn sign_ed25519_empty_message() {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
}
| {
Self::new()
} | identifier_body |
sign.rs | // Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
pub fn new(pubkey: &[u8]) -> Self {
Verifier {
pubkey: Vec::from(pubkey),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn verify(&self, expected_sig: &[u8]) -> bool {
let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
match pk.verify(&self.buf, expected_sig) {
Ok(_) => true,
_ => false,
}
}
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self {
Self::new()
}
}
impl Signer {
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)] | let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn sign_ed25519_empty_message() {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
} | mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() { | random_line_split |
sign.rs | // Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
pub fn new(pubkey: &[u8]) -> Self {
Verifier {
pubkey: Vec::from(pubkey),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn verify(&self, expected_sig: &[u8]) -> bool {
let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
match pk.verify(&self.buf, expected_sig) {
Ok(_) => true,
_ => false,
}
}
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self {
Self::new()
}
}
impl Signer {
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)]
mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() {
let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn | () {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
}
| sign_ed25519_empty_message | identifier_name |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
| // http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if!c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() {
return;
}
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
} | random_line_split |
|
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if!c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() {
return;
}
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn | (&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| cookies_for_url | identifier_name |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if!c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else {
Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() |
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| {
return;
} | conditional_block |
cookie_storage.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implementation of cookie storage as specified in
//! http://tools.ietf.org/html/rfc6265
use net_traits::CookieSource;
use url::Url;
use cookie::Cookie;
use std::cmp::Ordering;
pub struct CookieStorage {
cookies: Vec<Cookie>
}
impl CookieStorage {
pub fn new() -> CookieStorage {
CookieStorage {
cookies: Vec::new()
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn remove(&mut self, cookie: &Cookie, source: CookieSource) -> Result<Option<Cookie>, ()> | Ok(None)
}
}
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, source: CookieSource) {
let old_cookie = self.remove(&cookie, source);
if old_cookie.is_err() {
// This new cookie is not allowed to overwrite an existing one.
return;
}
if cookie.cookie.value.is_empty() {
return;
}
// Step 11
if let Some(old_cookie) = old_cookie.unwrap() {
// Step 11.3
cookie.creation_time = old_cookie.creation_time;
}
// Step 12
self.cookies.push(cookie);
}
pub fn cookie_comparator(a: &Cookie, b: &Cookie) -> Ordering {
let a_path_len = a.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
let b_path_len = b.cookie.path.as_ref().map(|p| p.len()).unwrap_or(0);
match a_path_len.cmp(&b_path_len) {
Ordering::Equal => {
let a_creation_time = a.creation_time.to_timespec();
let b_creation_time = b.creation_time.to_timespec();
a_creation_time.cmp(&b_creation_time)
}
// Ensure that longer paths are sorted earlier than shorter paths
Ordering::Greater => Ordering::Less,
Ordering::Less => Ordering::Greater,
}
}
// http://tools.ietf.org/html/rfc6265#section-5.4
pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
let filterer = |c: &&mut Cookie| -> bool {
info!(" === SENT COOKIE : {} {} {:?} {:?}",
c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
info!(" === SENT COOKIE RESULT {}", c.appropriate_for_url(url, source));
// Step 1
c.appropriate_for_url(url, source)
};
// Step 2
let mut url_cookies: Vec<&mut Cookie> = self.cookies.iter_mut().filter(filterer).collect();
url_cookies.sort_by(|a, b| CookieStorage::cookie_comparator(*a, *b));
let reducer = |acc: String, c: &mut &mut Cookie| -> String {
// Step 3
c.touch();
// Step 4
(match acc.len() {
0 => acc,
_ => acc + ";"
}) + &c.cookie.name + "=" + &c.cookie.value
};
let result = url_cookies.iter_mut().fold("".to_string(), reducer);
info!(" === COOKIES SENT: {}", result);
match result.len() {
0 => None,
_ => Some(result)
}
}
}
| {
// Step 1
let position = self.cookies.iter().position(|c| {
c.cookie.domain == cookie.cookie.domain &&
c.cookie.path == cookie.cookie.path &&
c.cookie.name == cookie.cookie.name
});
if let Some(ind) = position {
let c = self.cookies.remove(ind);
// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if !c.cookie.httponly || source == CookieSource::HTTP {
Ok(Some(c))
} else {
// Undo the removal.
self.cookies.push(c);
Err(())
}
} else { | identifier_body |
surface.rs | use {Scalar, TOLERANCE};
use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
/// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
/// The `Surface` normal
pub normal: UnitVec3D,
/// The node indices associated with the `Surface`
pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE |
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn compute_centroid(surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
}
| {
normal = -normal;
} | conditional_block |
surface.rs | use {Scalar, TOLERANCE};
use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
/// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
/// The `Surface` normal
pub normal: UnitVec3D,
/// The node indices associated with the `Surface`
pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE {
normal = -normal;
}
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn | (surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
}
| compute_centroid | identifier_name |
surface.rs | use {Scalar, TOLERANCE}; | /// Represents a `Surface` for a given set of points.
#[derive(Copy, Clone)]
pub struct Surface {
/// The `Surface` normal
pub normal: UnitVec3D,
/// The node indices associated with the `Surface`
pub nodes: [usize; 3],
}
impl Surface {
/// Creates a new `Surface` from the point cloud and indices provided.
pub fn new(vertices: &Vec<Vec3D>, index_0: usize, index_1: usize, index_2: usize) -> Surface {
let reference_point = vertices.iter()
.fold(Vec3D::zero(), |total, &vector| {
total + vector
}) / (vertices.len() as Scalar);
let base = vertices[index_0];
let relative_to_reference = base - reference_point;
let edge_0 = vertices[index_1] - base;
let edge_1 = vertices[index_2] - base;
let mut normal = edge_0.cross(edge_1).normalize();
if normal.dot(relative_to_reference) < TOLERANCE {
normal = -normal;
}
return Surface {
normal: normal,
nodes: [index_0, index_1, index_2],
};
}
/// Computes the centroid of a `Surface` using the node indices in the
/// `Surface` and the point cloud provided.
pub fn compute_centroid(surface: &Surface, vertices: &Vec<Vec3D>) -> Vec3D {
return surface.nodes.iter()
.fold(Vec3D::zero(), |total, &index| {
total + vertices[index]
}) / 3.0;
}
} | use maths::{CrossProduct, DotProduct, UnitVec3D, Vec3D};
| random_line_split |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn | <T : Clone>(arg: T) -> T {
arg.clone()
}
#[derive(PartialEq)]
struct Test(int);
fn main() {
// Check that ranges implement clone
assert!(test(1..5) == (1..5));
assert!(test(..5) == (..5));
assert!(test(1..) == (1..));
assert!(test(FullRange) == (FullRange));
// Check that ranges can still be used with non-clone limits
assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
}
| test | identifier_name |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | }
#[derive(PartialEq)]
struct Test(int);
fn main() {
// Check that ranges implement clone
assert!(test(1..5) == (1..5));
assert!(test(..5) == (..5));
assert!(test(1..) == (1..));
assert!(test(FullRange) == (FullRange));
// Check that ranges can still be used with non-clone limits
assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
} | // option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test<T : Clone>(arg: T) -> T {
arg.clone() | random_line_split |
issue-21384.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test<T : Clone>(arg: T) -> T {
arg.clone()
}
#[derive(PartialEq)]
struct Test(int);
fn main() | {
// Check that ranges implement clone
assert!(test(1..5) == (1..5));
assert!(test(..5) == (..5));
assert!(test(1..) == (1..));
assert!(test(FullRange) == (FullRange));
// Check that ranges can still be used with non-clone limits
assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
} | identifier_body |
|
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn | <R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| gaussian | identifier_name |
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
} | pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
} | random_line_split |
|
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
}
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else | ;
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| {
weight
} | conditional_block |
weight.rs | use rand::distributions::{IndependentSample, Normal};
use rand::{Closed01, Rng};
/// Represents a connection weight.
#[derive(Debug, Clone, Copy)]
pub struct Weight(pub f64);
impl Weight {
pub fn inv(self) -> Self {
Weight(-self.0)
}
}
impl Into<f64> for Weight {
fn into(self) -> f64 {
self.0
}
}
impl Into<f32> for Weight {
fn into(self) -> f32 {
self.0 as f32
}
}
/// Represents the range of a connection weight. The range is closed,
/// i.e. including both endpoints [low, high].
#[derive(Debug, Clone, Copy)]
pub struct WeightRange {
high: f64,
low: f64,
}
impl WeightRange {
pub fn new(high: f64, low: f64) -> WeightRange {
assert!(high >= low);
WeightRange {
high: high,
low: low,
}
}
pub fn high(&self) -> Weight {
Weight(self.high)
}
pub fn low(&self) -> Weight {
Weight(self.low)
}
pub fn unipolar(magnitude: f64) -> WeightRange |
pub fn bipolar(magnitude: f64) -> WeightRange {
assert!(magnitude >= 0.0);
WeightRange {
high: magnitude,
low: -magnitude,
}
}
pub fn in_range(&self, weight: Weight) -> bool {
weight.0 >= self.low && weight.0 <= self.high
}
pub fn random_weight<R: Rng>(&self, rng: &mut R) -> Weight {
let w = rng.gen::<Closed01<f64>>().0;
debug_assert!(w >= 0.0 && w <= 1.0);
let weight = Weight(((self.high - self.low) * w) + self.low);
debug_assert!(self.in_range(weight));
weight
}
pub fn clip_weight(&self, weight: Weight) -> Weight {
let clipped = if weight.0 >= self.high {
Weight(self.high)
} else if weight.0 <= self.low {
Weight(self.low)
} else {
weight
};
debug_assert!(self.in_range(clipped));
clipped
}
}
/// Defines a perturbance method.
#[derive(Debug, Clone, Copy)]
pub enum WeightPerturbanceMethod {
JiggleUniform { range: WeightRange },
JiggleGaussian { sigma: f64 },
Random,
}
pub fn gaussian<R: Rng>(sigma: f64, rng: &mut R) -> f64 {
let normal = Normal::new(0.0, sigma);
normal.ind_sample(rng)
}
impl WeightPerturbanceMethod {
pub fn perturb<R: Rng>(
&self,
weight: Weight,
weight_range: &WeightRange,
rng: &mut R,
) -> Weight {
match *self {
WeightPerturbanceMethod::Random => weight_range.random_weight(rng),
WeightPerturbanceMethod::JiggleUniform { range } => {
weight_range.clip_weight(Weight(weight.0 + range.random_weight(rng).0))
}
WeightPerturbanceMethod::JiggleGaussian { sigma } => {
weight_range.clip_weight(Weight(weight.0 + gaussian(sigma, rng)))
}
}
}
}
| {
if magnitude >= 0.0 {
WeightRange {
high: magnitude,
low: 0.0,
}
} else {
WeightRange {
high: 0.0,
low: magnitude,
}
}
} | identifier_body |
lib.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use std::collections::VecDeque;
use std::future::Future;
use std::sync::Arc;
use parking_lot::Mutex;
use tokio::sync::{Semaphore, SemaphorePermit};
struct Inner {
sema: Semaphore,
available_ids: Mutex<VecDeque<usize>>,
}
#[derive(Clone)]
pub struct AsyncSemaphore {
inner: Arc<Inner>,
}
impl AsyncSemaphore {
pub fn new(permits: usize) -> AsyncSemaphore {
let mut available_ids = VecDeque::new();
for id in 1..=permits {
available_ids.push_back(id);
}
AsyncSemaphore {
inner: Arc::new(Inner {
sema: Semaphore::new(permits),
available_ids: Mutex::new(available_ids),
}),
}
}
pub fn available_permits(&self) -> usize {
self.inner.sema.available_permits()
}
///
/// Runs the given Future-creating function (and the Future it returns) under the semaphore.
///
pub async fn with_acquired<F, B, O>(self, f: F) -> O
where
F: FnOnce(usize) -> B + Send +'static,
B: Future<Output = O> + Send +'static,
{ | let res = f(permit.id).await;
drop(permit);
res
}
async fn acquire(&self) -> Permit<'_> {
let permit = self.inner.sema.acquire().await.expect("semaphore closed");
let id = {
let mut available_ids = self.inner.available_ids.lock();
available_ids
.pop_front()
.expect("More permits were distributed than ids exist.")
};
Permit {
inner: self.inner.clone(),
_permit: permit,
id,
}
}
}
pub struct Permit<'a> {
inner: Arc<Inner>,
// NB: Kept for its `Drop` impl.
_permit: SemaphorePermit<'a>,
id: usize,
}
impl<'a> Drop for Permit<'a> {
fn drop(&mut self) {
let mut available_ids = self.inner.available_ids.lock();
available_ids.push_back(self.id);
}
}
#[cfg(test)]
mod tests; | let permit = self.acquire().await; | random_line_split |
lib.rs | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use std::collections::VecDeque;
use std::future::Future;
use std::sync::Arc;
use parking_lot::Mutex;
use tokio::sync::{Semaphore, SemaphorePermit};
struct Inner {
sema: Semaphore,
available_ids: Mutex<VecDeque<usize>>,
}
#[derive(Clone)]
pub struct AsyncSemaphore {
inner: Arc<Inner>,
}
impl AsyncSemaphore {
pub fn new(permits: usize) -> AsyncSemaphore {
let mut available_ids = VecDeque::new();
for id in 1..=permits {
available_ids.push_back(id);
}
AsyncSemaphore {
inner: Arc::new(Inner {
sema: Semaphore::new(permits),
available_ids: Mutex::new(available_ids),
}),
}
}
pub fn | (&self) -> usize {
self.inner.sema.available_permits()
}
///
/// Runs the given Future-creating function (and the Future it returns) under the semaphore.
///
pub async fn with_acquired<F, B, O>(self, f: F) -> O
where
F: FnOnce(usize) -> B + Send +'static,
B: Future<Output = O> + Send +'static,
{
let permit = self.acquire().await;
let res = f(permit.id).await;
drop(permit);
res
}
async fn acquire(&self) -> Permit<'_> {
let permit = self.inner.sema.acquire().await.expect("semaphore closed");
let id = {
let mut available_ids = self.inner.available_ids.lock();
available_ids
.pop_front()
.expect("More permits were distributed than ids exist.")
};
Permit {
inner: self.inner.clone(),
_permit: permit,
id,
}
}
}
pub struct Permit<'a> {
inner: Arc<Inner>,
// NB: Kept for its `Drop` impl.
_permit: SemaphorePermit<'a>,
id: usize,
}
impl<'a> Drop for Permit<'a> {
fn drop(&mut self) {
let mut available_ids = self.inner.available_ids.lock();
available_ids.push_back(self.id);
}
}
#[cfg(test)]
mod tests;
| available_permits | identifier_name |
error.rs | use std::error;
use std::fmt;
use provider;
use provider::service::inline::systemd;
#[derive(Debug)]
pub enum | {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error {
Error::DBus(err).into()
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
}
| Error | identifier_name |
error.rs | use std::error;
use std::fmt; | pub enum Error {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error {
Error::DBus(err).into()
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
} |
use provider;
use provider::service::inline::systemd;
#[derive(Debug)] | random_line_split |
error.rs | use std::error;
use std::fmt;
use provider;
use provider::service::inline::systemd;
#[derive(Debug)]
pub enum Error {
DBus(systemd::dbus::Error),
DBusArgTypeMismatch(systemd::dbus::arg::TypeMismatchError),
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::DBus(ref err) => err.description(),
Error::DBusArgTypeMismatch(ref err) => err.description(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::DBus(ref err) => err.fmt(f),
Error::DBusArgTypeMismatch(ref err) => err.fmt(f),
}
}
}
impl From<systemd::dbus::Error> for Error {
fn from(err: systemd::dbus::Error) -> Error {
Error::DBus(err)
}
}
impl From<systemd::dbus::arg::TypeMismatchError> for Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> Error {
Error::DBusArgTypeMismatch(err)
}
}
impl From<systemd::dbus::Error> for provider::error::Error {
fn from(err: systemd::dbus::Error) -> provider::error::Error |
}
impl From<systemd::dbus::arg::TypeMismatchError> for provider::error::Error {
fn from(err: systemd::dbus::arg::TypeMismatchError) -> provider::error::Error {
Error::DBusArgTypeMismatch(err).into()
}
}
| {
Error::DBus(err).into()
} | identifier_body |
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate proc_macro;
use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn hello_world(_input: TokenStream) -> TokenStream | {
TokenStream::new()
} | identifier_body |
|
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate proc_macro;
use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn | (_input: TokenStream) -> TokenStream {
TokenStream::new()
}
| hello_world | identifier_name |
lib_2015.rs | // Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
| use proc_macro::TokenStream;
/// This macro is a no-op; it is exceedingly simple as a result
/// of avoiding dependencies on both the syn and quote crates.
#[proc_macro_derive(HelloWorld)]
pub fn hello_world(_input: TokenStream) -> TokenStream {
TokenStream::new()
} | extern crate proc_macro;
| random_line_split |
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T:!Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
}
_ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty!= expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span; | let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
} | random_line_split |
|
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T:!Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) | _ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty!= expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span;
let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
}
| {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
} | identifier_body |
noop_method_call.rs | use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T:!Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn | (&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
}
_ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty!= expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}
let expr_span = expr.span;
let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
}
| check_expr | identifier_name |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn | (self, other: f64) -> Vec2 {
Vec2 { x: self.x * other, y: self.y * other }
}
}
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
}
| vmul | identifier_name |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn vmul(self, other: f64) -> Vec2 |
}
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
}
| {
Vec2 { x: self.x * other, y: self.y * other }
} | identifier_body |
issue-3743.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Vec2 {
x: f64,
y: f64
}
// methods we want to export as methods as well as operators
impl Vec2 {
#[inline(always)]
fn vmul(self, other: f64) -> Vec2 {
Vec2 { x: self.x * other, y: self.y * other }
}
} |
// Right-hand-side operator visitor pattern
trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; }
// Vec2's implementation of Mul "from the other side" using the above trait
impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 {
fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) }
}
// Implementation of 'f64 as right-hand-side of Vec2::Mul'
impl RhsOfVec2Mul<Vec2> for f64 {
fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) }
}
// Usage with failing inference
pub fn main() {
let a = Vec2 { x: 3.0, y: 4.0 };
// the following compiles and works properly
let v1: Vec2 = a * 3.0;
println!("{} {}", v1.x, v1.y);
// the following compiles but v2 will not be Vec2 yet and
// using it later will cause an error that the type of v2
// must be known
let v2 = a * 3.0;
println!("{} {}", v2.x, v2.y); // error regarding v2's type
} | random_line_split |
|
marker-attribute-on-non-trait.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(marker_trait_attr)]
#[marker] //~ ERROR attribute can only be applied to a trait
struct | {}
#[marker] //~ ERROR attribute can only be applied to a trait
impl Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
union Union {
x: i32,
}
#[marker] //~ ERROR attribute can only be applied to a trait
const CONST: usize = 10;
#[marker] //~ ERROR attribute can only be applied to a trait
fn function() {}
#[marker] //~ ERROR attribute can only be applied to a trait
type Type = ();
fn main() {}
| Struct | identifier_name |
marker-attribute-on-non-trait.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(marker_trait_attr)]
#[marker] //~ ERROR attribute can only be applied to a trait
struct Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
impl Struct {}
#[marker] //~ ERROR attribute can only be applied to a trait
union Union { |
#[marker] //~ ERROR attribute can only be applied to a trait
const CONST: usize = 10;
#[marker] //~ ERROR attribute can only be applied to a trait
fn function() {}
#[marker] //~ ERROR attribute can only be applied to a trait
type Type = ();
fn main() {} | x: i32,
} | random_line_split |
tyencode.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type encoding
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::io;
use std::io::MemWriter;
use std::str;
use std::fmt;
use middle::ty::param_ty;
use middle::ty;
use syntax::abi::AbiSet;
use syntax::ast;
use syntax::ast::*;
use syntax::diagnostic::SpanHandler;
use syntax::print::pprust::*;
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
format_args!(|a| { mywrite($wr, a) }, $($arg)*)
) )
pub struct ctxt {
diag: @SpanHandler,
// Def -> str Callback:
ds: extern "Rust" fn(DefId) -> ~str,
// The type context.
tcx: ty::ctxt,
abbrevs: abbrev_ctxt
}
// Compact string representation for ty.t values. API ty_str & parse_from_str.
// Extra parameters are for converting to/from def_ids in the string rep.
// Whatever format you choose should not contain pipe characters.
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @str
}
pub enum abbrev_ctxt {
ac_no_abbrevs,
ac_use_abbrevs(@RefCell<HashMap<ty::t, ty_abbrev>>),
}
fn mywrite(w: &mut MemWriter, fmt: &fmt::Arguments) {
fmt::write(&mut *w as &mut io::Writer, fmt);
}
pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
match cx.abbrevs {
ac_no_abbrevs => {
let result_str_opt;
{
let short_names_cache = cx.tcx.short_names_cache.borrow();
result_str_opt = short_names_cache.get()
.find(&t)
.map(|result| *result);
}
let result_str = match result_str_opt {
Some(s) => s,
None => {
let wr = &mut MemWriter::new();
enc_sty(wr, cx, &ty::get(t).sty);
let s = str::from_utf8(wr.get_ref()).to_managed();
let mut short_names_cache = cx.tcx
.short_names_cache
.borrow_mut();
short_names_cache.get().insert(t, s);
s
}
};
w.write(result_str.as_bytes());
}
ac_use_abbrevs(abbrevs) => {
{
let mut abbrevs = abbrevs.borrow_mut();
match abbrevs.get().find(&t) {
Some(a) => { w.write(a.s.as_bytes()); return; }
None => {}
}
}
let pos = w.tell();
enc_sty(w, cx, &ty::get(t).sty);
let end = w.tell();
let len = end - pos;
fn estimate_sz(u: u64) -> u64 {
let mut n = u;
let mut len = 0;
while n!= 0 { len += 1; n = n >> 4; }
return len;
}
let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let a = ty_abbrev { pos: pos as uint,
len: len as uint,
s: s };
{
let mut abbrevs = abbrevs.borrow_mut();
abbrevs.get().insert(t, a);
}
}
return;
}
}
}
fn enc_mutability(w: &mut MemWriter, mt: ast::Mutability) {
match mt {
MutImmutable => (),
MutMutable => mywrite!(w, "m"),
}
}
fn enc_mt(w: &mut MemWriter, cx: @ctxt, mt: ty::mt) {
enc_mutability(w, mt.mutbl);
enc_ty(w, cx, mt.ty);
}
fn enc_opt<T>(w: &mut MemWriter, t: Option<T>, enc_f: |&mut MemWriter, T|) {
match t {
None => mywrite!(w, "n"),
Some(v) => {
mywrite!(w, "s");
enc_f(w, v);
}
}
}
pub fn enc_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions);
enc_opt(w, substs.self_ty, |w, t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
fn enc_region_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::RegionSubsts) {
match *substs {
ty::ErasedRegions => {
mywrite!(w, "e");
}
ty::NonerasedRegions(ref regions) => {
mywrite!(w, "n");
for &r in regions.iter() {
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
}
}
fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
match r {
ty::ReLateBound(id, br) => {
mywrite!(w, "b[{}|", id);
enc_bound_region(w, cx, br);
mywrite!(w, "]");
}
ty::ReEarlyBound(node_id, index, ident) => {
mywrite!(w, "B[{}|{}|{}]",
node_id,
index,
cx.tcx.sess.str_of(ident));
}
ty::ReFree(ref fr) => {
mywrite!(w, "f[{}|", fr.scope_id);
enc_bound_region(w, cx, fr.bound_region);
mywrite!(w, "]");
}
ty::ReScope(nid) => {
mywrite!(w, "s{}|", nid);
}
ty::ReStatic => {
mywrite!(w, "t");
}
ty::ReEmpty => {
mywrite!(w, "e");
}
ty::ReInfer(_) => {
// these should not crop up after typeck
cx.diag.handler().bug("Cannot encode region variables");
}
}
}
fn enc_bound_region(w: &mut MemWriter, cx: @ctxt, br: ty::BoundRegion) {
match br {
ty::BrAnon(idx) => {
mywrite!(w, "a{}|", idx);
}
ty::BrNamed(d, s) => {
mywrite!(w, "[{}|{}]",
(cx.ds)(d),
cx.tcx.sess.str_of(s));
}
ty::BrFresh(id) => {
mywrite!(w, "f{}|", id);
}
}
}
pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) {
mywrite!(w, "/");
match v {
ty::vstore_fixed(u) => mywrite!(w, "{}|", u),
ty::vstore_uniq => mywrite!(w, "~"),
ty::vstore_box => mywrite!(w, "@"),
ty::vstore_slice(r) => {
mywrite!(w, "&");
enc_region(w, cx, r);
}
}
}
pub fn enc_trait_ref(w: &mut MemWriter, cx: @ctxt, s: &ty::TraitRef) {
mywrite!(w, "{}|", (cx.ds)(s.def_id));
enc_substs(w, cx, &s.substs);
}
pub fn enc_trait_store(w: &mut MemWriter, cx: @ctxt, s: ty::TraitStore) {
match s {
ty::UniqTraitStore => mywrite!(w, "~"),
ty::BoxTraitStore => mywrite!(w, "@"),
ty::RegionTraitStore(re) => {
mywrite!(w, "&");
enc_region(w, cx, re);
}
}
}
fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
match *st {
ty::ty_nil => mywrite!(w, "n"),
ty::ty_bot => mywrite!(w, "z"),
ty::ty_bool => mywrite!(w, "b"),
ty::ty_char => mywrite!(w, "c"),
ty::ty_int(t) => {
match t {
TyI => mywrite!(w, "i"),
TyI8 => mywrite!(w, "MB"),
TyI16 => mywrite!(w, "MW"),
TyI32 => mywrite!(w, "ML"),
TyI64 => mywrite!(w, "MD")
}
}
ty::ty_uint(t) => {
match t {
TyU => mywrite!(w, "u"),
TyU8 => mywrite!(w, "Mb"),
TyU16 => mywrite!(w, "Mw"),
TyU32 => mywrite!(w, "Ml"),
TyU64 => mywrite!(w, "Md")
}
}
ty::ty_float(t) => {
match t {
TyF32 => mywrite!(w, "Mf"),
TyF64 => mywrite!(w, "MF"),
}
}
ty::ty_enum(def, ref substs) => {
mywrite!(w, "t[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_trait(def, ref substs, store, mt, bounds) => {
mywrite!(w, "x[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
enc_trait_store(w, cx, store);
enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
mywrite!(w, "]");
}
ty::ty_tup(ref ts) => {
mywrite!(w, "T[");
for t in ts.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
ty::ty_box(typ) => { mywrite!(w, "@"); enc_ty(w, cx, typ); }
ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); }
ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); }
ty::ty_rptr(r, mt) => {
mywrite!(w, "&");
enc_region(w, cx, r); | ty::ty_vec(mt, v) => {
mywrite!(w, "V");
enc_mt(w, cx, mt);
enc_vstore(w, cx, v);
}
ty::ty_str(v) => {
mywrite!(w, "v");
enc_vstore(w, cx, v);
}
ty::ty_unboxed_vec(mt) => { mywrite!(w, "U"); enc_mt(w, cx, mt); }
ty::ty_closure(ref f) => {
mywrite!(w, "f");
enc_closure_ty(w, cx, f);
}
ty::ty_bare_fn(ref f) => {
mywrite!(w, "F");
enc_bare_fn_ty(w, cx, f);
}
ty::ty_infer(_) => {
cx.diag.handler().bug("Cannot encode inference variable types");
}
ty::ty_param(param_ty {idx: id, def_id: did}) => {
mywrite!(w, "p{}|{}", (cx.ds)(did), id);
}
ty::ty_self(did) => {
mywrite!(w, "s{}|", (cx.ds)(did));
}
ty::ty_type => mywrite!(w, "Y"),
ty::ty_opaque_closure_ptr(p) => {
mywrite!(w, "C&");
enc_sigil(w, p);
}
ty::ty_struct(def, ref substs) => {
mywrite!(w, "a[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_err => fail!("Shouldn't encode error type")
}
}
fn enc_sigil(w: &mut MemWriter, sigil: Sigil) {
match sigil {
ManagedSigil => mywrite!(w, "@"),
OwnedSigil => mywrite!(w, "~"),
BorrowedSigil => mywrite!(w, "&"),
}
}
fn enc_purity(w: &mut MemWriter, p: Purity) {
match p {
ImpureFn => mywrite!(w, "i"),
UnsafeFn => mywrite!(w, "u"),
ExternFn => mywrite!(w, "c")
}
}
fn enc_abi_set(w: &mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
});
mywrite!(w, "]")
}
fn enc_onceness(w: &mut MemWriter, o: Onceness) {
match o {
Once => mywrite!(w, "o"),
Many => mywrite!(w, "m")
}
}
pub fn enc_bare_fn_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::BareFnTy) {
enc_purity(w, ft.purity);
enc_abi_set(w, ft.abis);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_closure_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::ClosureTy) {
enc_sigil(w, ft.sigil);
enc_purity(w, ft.purity);
enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_fn_sig(w: &mut MemWriter, cx: @ctxt, fsig: &ty::FnSig) {
mywrite!(w, "[{}|", fsig.binder_id);
for ty in fsig.inputs.iter() {
enc_ty(w, cx, *ty);
}
mywrite!(w, "]");
if fsig.variadic {
mywrite!(w, "V");
} else {
mywrite!(w, "N");
}
enc_ty(w, cx, fsig.output);
}
fn enc_bounds(w: &mut MemWriter, cx: @ctxt, bs: &ty::ParamBounds) {
for bound in bs.builtin_bounds.iter() {
match bound {
ty::BoundSend => mywrite!(w, "S"),
ty::BoundFreeze => mywrite!(w, "K"),
ty::BoundStatic => mywrite!(w, "O"),
ty::BoundSized => mywrite!(w, "Z"),
ty::BoundPod => mywrite!(w, "P"),
}
}
for &tp in bs.trait_bounds.iter() {
mywrite!(w, "I");
enc_trait_ref(w, cx, tp);
}
mywrite!(w, ".");
}
pub fn enc_type_param_def(w: &mut MemWriter, cx: @ctxt, v: &ty::TypeParameterDef) {
mywrite!(w, "{}:{}|", cx.tcx.sess.str_of(v.ident), (cx.ds)(v.def_id));
enc_bounds(w, cx, v.bounds);
} | enc_mt(w, cx, mt);
} | random_line_split |
tyencode.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type encoding
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::io;
use std::io::MemWriter;
use std::str;
use std::fmt;
use middle::ty::param_ty;
use middle::ty;
use syntax::abi::AbiSet;
use syntax::ast;
use syntax::ast::*;
use syntax::diagnostic::SpanHandler;
use syntax::print::pprust::*;
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
format_args!(|a| { mywrite($wr, a) }, $($arg)*)
) )
pub struct ctxt {
diag: @SpanHandler,
// Def -> str Callback:
ds: extern "Rust" fn(DefId) -> ~str,
// The type context.
tcx: ty::ctxt,
abbrevs: abbrev_ctxt
}
// Compact string representation for ty.t values. API ty_str & parse_from_str.
// Extra parameters are for converting to/from def_ids in the string rep.
// Whatever format you choose should not contain pipe characters.
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @str
}
pub enum abbrev_ctxt {
ac_no_abbrevs,
ac_use_abbrevs(@RefCell<HashMap<ty::t, ty_abbrev>>),
}
fn | (w: &mut MemWriter, fmt: &fmt::Arguments) {
fmt::write(&mut *w as &mut io::Writer, fmt);
}
pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
match cx.abbrevs {
ac_no_abbrevs => {
let result_str_opt;
{
let short_names_cache = cx.tcx.short_names_cache.borrow();
result_str_opt = short_names_cache.get()
.find(&t)
.map(|result| *result);
}
let result_str = match result_str_opt {
Some(s) => s,
None => {
let wr = &mut MemWriter::new();
enc_sty(wr, cx, &ty::get(t).sty);
let s = str::from_utf8(wr.get_ref()).to_managed();
let mut short_names_cache = cx.tcx
.short_names_cache
.borrow_mut();
short_names_cache.get().insert(t, s);
s
}
};
w.write(result_str.as_bytes());
}
ac_use_abbrevs(abbrevs) => {
{
let mut abbrevs = abbrevs.borrow_mut();
match abbrevs.get().find(&t) {
Some(a) => { w.write(a.s.as_bytes()); return; }
None => {}
}
}
let pos = w.tell();
enc_sty(w, cx, &ty::get(t).sty);
let end = w.tell();
let len = end - pos;
fn estimate_sz(u: u64) -> u64 {
let mut n = u;
let mut len = 0;
while n!= 0 { len += 1; n = n >> 4; }
return len;
}
let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let a = ty_abbrev { pos: pos as uint,
len: len as uint,
s: s };
{
let mut abbrevs = abbrevs.borrow_mut();
abbrevs.get().insert(t, a);
}
}
return;
}
}
}
fn enc_mutability(w: &mut MemWriter, mt: ast::Mutability) {
match mt {
MutImmutable => (),
MutMutable => mywrite!(w, "m"),
}
}
fn enc_mt(w: &mut MemWriter, cx: @ctxt, mt: ty::mt) {
enc_mutability(w, mt.mutbl);
enc_ty(w, cx, mt.ty);
}
fn enc_opt<T>(w: &mut MemWriter, t: Option<T>, enc_f: |&mut MemWriter, T|) {
match t {
None => mywrite!(w, "n"),
Some(v) => {
mywrite!(w, "s");
enc_f(w, v);
}
}
}
pub fn enc_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions);
enc_opt(w, substs.self_ty, |w, t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
fn enc_region_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::RegionSubsts) {
match *substs {
ty::ErasedRegions => {
mywrite!(w, "e");
}
ty::NonerasedRegions(ref regions) => {
mywrite!(w, "n");
for &r in regions.iter() {
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
}
}
fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
match r {
ty::ReLateBound(id, br) => {
mywrite!(w, "b[{}|", id);
enc_bound_region(w, cx, br);
mywrite!(w, "]");
}
ty::ReEarlyBound(node_id, index, ident) => {
mywrite!(w, "B[{}|{}|{}]",
node_id,
index,
cx.tcx.sess.str_of(ident));
}
ty::ReFree(ref fr) => {
mywrite!(w, "f[{}|", fr.scope_id);
enc_bound_region(w, cx, fr.bound_region);
mywrite!(w, "]");
}
ty::ReScope(nid) => {
mywrite!(w, "s{}|", nid);
}
ty::ReStatic => {
mywrite!(w, "t");
}
ty::ReEmpty => {
mywrite!(w, "e");
}
ty::ReInfer(_) => {
// these should not crop up after typeck
cx.diag.handler().bug("Cannot encode region variables");
}
}
}
fn enc_bound_region(w: &mut MemWriter, cx: @ctxt, br: ty::BoundRegion) {
match br {
ty::BrAnon(idx) => {
mywrite!(w, "a{}|", idx);
}
ty::BrNamed(d, s) => {
mywrite!(w, "[{}|{}]",
(cx.ds)(d),
cx.tcx.sess.str_of(s));
}
ty::BrFresh(id) => {
mywrite!(w, "f{}|", id);
}
}
}
pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) {
mywrite!(w, "/");
match v {
ty::vstore_fixed(u) => mywrite!(w, "{}|", u),
ty::vstore_uniq => mywrite!(w, "~"),
ty::vstore_box => mywrite!(w, "@"),
ty::vstore_slice(r) => {
mywrite!(w, "&");
enc_region(w, cx, r);
}
}
}
pub fn enc_trait_ref(w: &mut MemWriter, cx: @ctxt, s: &ty::TraitRef) {
mywrite!(w, "{}|", (cx.ds)(s.def_id));
enc_substs(w, cx, &s.substs);
}
pub fn enc_trait_store(w: &mut MemWriter, cx: @ctxt, s: ty::TraitStore) {
match s {
ty::UniqTraitStore => mywrite!(w, "~"),
ty::BoxTraitStore => mywrite!(w, "@"),
ty::RegionTraitStore(re) => {
mywrite!(w, "&");
enc_region(w, cx, re);
}
}
}
fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
match *st {
ty::ty_nil => mywrite!(w, "n"),
ty::ty_bot => mywrite!(w, "z"),
ty::ty_bool => mywrite!(w, "b"),
ty::ty_char => mywrite!(w, "c"),
ty::ty_int(t) => {
match t {
TyI => mywrite!(w, "i"),
TyI8 => mywrite!(w, "MB"),
TyI16 => mywrite!(w, "MW"),
TyI32 => mywrite!(w, "ML"),
TyI64 => mywrite!(w, "MD")
}
}
ty::ty_uint(t) => {
match t {
TyU => mywrite!(w, "u"),
TyU8 => mywrite!(w, "Mb"),
TyU16 => mywrite!(w, "Mw"),
TyU32 => mywrite!(w, "Ml"),
TyU64 => mywrite!(w, "Md")
}
}
ty::ty_float(t) => {
match t {
TyF32 => mywrite!(w, "Mf"),
TyF64 => mywrite!(w, "MF"),
}
}
ty::ty_enum(def, ref substs) => {
mywrite!(w, "t[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_trait(def, ref substs, store, mt, bounds) => {
mywrite!(w, "x[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
enc_trait_store(w, cx, store);
enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
mywrite!(w, "]");
}
ty::ty_tup(ref ts) => {
mywrite!(w, "T[");
for t in ts.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
ty::ty_box(typ) => { mywrite!(w, "@"); enc_ty(w, cx, typ); }
ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); }
ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); }
ty::ty_rptr(r, mt) => {
mywrite!(w, "&");
enc_region(w, cx, r);
enc_mt(w, cx, mt);
}
ty::ty_vec(mt, v) => {
mywrite!(w, "V");
enc_mt(w, cx, mt);
enc_vstore(w, cx, v);
}
ty::ty_str(v) => {
mywrite!(w, "v");
enc_vstore(w, cx, v);
}
ty::ty_unboxed_vec(mt) => { mywrite!(w, "U"); enc_mt(w, cx, mt); }
ty::ty_closure(ref f) => {
mywrite!(w, "f");
enc_closure_ty(w, cx, f);
}
ty::ty_bare_fn(ref f) => {
mywrite!(w, "F");
enc_bare_fn_ty(w, cx, f);
}
ty::ty_infer(_) => {
cx.diag.handler().bug("Cannot encode inference variable types");
}
ty::ty_param(param_ty {idx: id, def_id: did}) => {
mywrite!(w, "p{}|{}", (cx.ds)(did), id);
}
ty::ty_self(did) => {
mywrite!(w, "s{}|", (cx.ds)(did));
}
ty::ty_type => mywrite!(w, "Y"),
ty::ty_opaque_closure_ptr(p) => {
mywrite!(w, "C&");
enc_sigil(w, p);
}
ty::ty_struct(def, ref substs) => {
mywrite!(w, "a[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_err => fail!("Shouldn't encode error type")
}
}
fn enc_sigil(w: &mut MemWriter, sigil: Sigil) {
match sigil {
ManagedSigil => mywrite!(w, "@"),
OwnedSigil => mywrite!(w, "~"),
BorrowedSigil => mywrite!(w, "&"),
}
}
fn enc_purity(w: &mut MemWriter, p: Purity) {
match p {
ImpureFn => mywrite!(w, "i"),
UnsafeFn => mywrite!(w, "u"),
ExternFn => mywrite!(w, "c")
}
}
fn enc_abi_set(w: &mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
});
mywrite!(w, "]")
}
fn enc_onceness(w: &mut MemWriter, o: Onceness) {
match o {
Once => mywrite!(w, "o"),
Many => mywrite!(w, "m")
}
}
pub fn enc_bare_fn_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::BareFnTy) {
enc_purity(w, ft.purity);
enc_abi_set(w, ft.abis);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_closure_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::ClosureTy) {
enc_sigil(w, ft.sigil);
enc_purity(w, ft.purity);
enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_fn_sig(w: &mut MemWriter, cx: @ctxt, fsig: &ty::FnSig) {
mywrite!(w, "[{}|", fsig.binder_id);
for ty in fsig.inputs.iter() {
enc_ty(w, cx, *ty);
}
mywrite!(w, "]");
if fsig.variadic {
mywrite!(w, "V");
} else {
mywrite!(w, "N");
}
enc_ty(w, cx, fsig.output);
}
fn enc_bounds(w: &mut MemWriter, cx: @ctxt, bs: &ty::ParamBounds) {
for bound in bs.builtin_bounds.iter() {
match bound {
ty::BoundSend => mywrite!(w, "S"),
ty::BoundFreeze => mywrite!(w, "K"),
ty::BoundStatic => mywrite!(w, "O"),
ty::BoundSized => mywrite!(w, "Z"),
ty::BoundPod => mywrite!(w, "P"),
}
}
for &tp in bs.trait_bounds.iter() {
mywrite!(w, "I");
enc_trait_ref(w, cx, tp);
}
mywrite!(w, ".");
}
pub fn enc_type_param_def(w: &mut MemWriter, cx: @ctxt, v: &ty::TypeParameterDef) {
mywrite!(w, "{}:{}|", cx.tcx.sess.str_of(v.ident), (cx.ds)(v.def_id));
enc_bounds(w, cx, v.bounds);
}
| mywrite | identifier_name |
tyencode.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type encoding
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::io;
use std::io::MemWriter;
use std::str;
use std::fmt;
use middle::ty::param_ty;
use middle::ty;
use syntax::abi::AbiSet;
use syntax::ast;
use syntax::ast::*;
use syntax::diagnostic::SpanHandler;
use syntax::print::pprust::*;
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
format_args!(|a| { mywrite($wr, a) }, $($arg)*)
) )
pub struct ctxt {
diag: @SpanHandler,
// Def -> str Callback:
ds: extern "Rust" fn(DefId) -> ~str,
// The type context.
tcx: ty::ctxt,
abbrevs: abbrev_ctxt
}
// Compact string representation for ty.t values. API ty_str & parse_from_str.
// Extra parameters are for converting to/from def_ids in the string rep.
// Whatever format you choose should not contain pipe characters.
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @str
}
pub enum abbrev_ctxt {
ac_no_abbrevs,
ac_use_abbrevs(@RefCell<HashMap<ty::t, ty_abbrev>>),
}
fn mywrite(w: &mut MemWriter, fmt: &fmt::Arguments) {
fmt::write(&mut *w as &mut io::Writer, fmt);
}
pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
match cx.abbrevs {
ac_no_abbrevs => {
let result_str_opt;
{
let short_names_cache = cx.tcx.short_names_cache.borrow();
result_str_opt = short_names_cache.get()
.find(&t)
.map(|result| *result);
}
let result_str = match result_str_opt {
Some(s) => s,
None => {
let wr = &mut MemWriter::new();
enc_sty(wr, cx, &ty::get(t).sty);
let s = str::from_utf8(wr.get_ref()).to_managed();
let mut short_names_cache = cx.tcx
.short_names_cache
.borrow_mut();
short_names_cache.get().insert(t, s);
s
}
};
w.write(result_str.as_bytes());
}
ac_use_abbrevs(abbrevs) => {
{
let mut abbrevs = abbrevs.borrow_mut();
match abbrevs.get().find(&t) {
Some(a) => { w.write(a.s.as_bytes()); return; }
None => {}
}
}
let pos = w.tell();
enc_sty(w, cx, &ty::get(t).sty);
let end = w.tell();
let len = end - pos;
fn estimate_sz(u: u64) -> u64 {
let mut n = u;
let mut len = 0;
while n!= 0 { len += 1; n = n >> 4; }
return len;
}
let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let a = ty_abbrev { pos: pos as uint,
len: len as uint,
s: s };
{
let mut abbrevs = abbrevs.borrow_mut();
abbrevs.get().insert(t, a);
}
}
return;
}
}
}
fn enc_mutability(w: &mut MemWriter, mt: ast::Mutability) {
match mt {
MutImmutable => (),
MutMutable => mywrite!(w, "m"),
}
}
fn enc_mt(w: &mut MemWriter, cx: @ctxt, mt: ty::mt) {
enc_mutability(w, mt.mutbl);
enc_ty(w, cx, mt.ty);
}
fn enc_opt<T>(w: &mut MemWriter, t: Option<T>, enc_f: |&mut MemWriter, T|) {
match t {
None => mywrite!(w, "n"),
Some(v) => {
mywrite!(w, "s");
enc_f(w, v);
}
}
}
pub fn enc_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions);
enc_opt(w, substs.self_ty, |w, t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
fn enc_region_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::RegionSubsts) {
match *substs {
ty::ErasedRegions => {
mywrite!(w, "e");
}
ty::NonerasedRegions(ref regions) => {
mywrite!(w, "n");
for &r in regions.iter() {
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
}
}
fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
match r {
ty::ReLateBound(id, br) => {
mywrite!(w, "b[{}|", id);
enc_bound_region(w, cx, br);
mywrite!(w, "]");
}
ty::ReEarlyBound(node_id, index, ident) => {
mywrite!(w, "B[{}|{}|{}]",
node_id,
index,
cx.tcx.sess.str_of(ident));
}
ty::ReFree(ref fr) => {
mywrite!(w, "f[{}|", fr.scope_id);
enc_bound_region(w, cx, fr.bound_region);
mywrite!(w, "]");
}
ty::ReScope(nid) => {
mywrite!(w, "s{}|", nid);
}
ty::ReStatic => {
mywrite!(w, "t");
}
ty::ReEmpty => {
mywrite!(w, "e");
}
ty::ReInfer(_) => {
// these should not crop up after typeck
cx.diag.handler().bug("Cannot encode region variables");
}
}
}
fn enc_bound_region(w: &mut MemWriter, cx: @ctxt, br: ty::BoundRegion) {
match br {
ty::BrAnon(idx) => {
mywrite!(w, "a{}|", idx);
}
ty::BrNamed(d, s) => |
ty::BrFresh(id) => {
mywrite!(w, "f{}|", id);
}
}
}
pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) {
mywrite!(w, "/");
match v {
ty::vstore_fixed(u) => mywrite!(w, "{}|", u),
ty::vstore_uniq => mywrite!(w, "~"),
ty::vstore_box => mywrite!(w, "@"),
ty::vstore_slice(r) => {
mywrite!(w, "&");
enc_region(w, cx, r);
}
}
}
pub fn enc_trait_ref(w: &mut MemWriter, cx: @ctxt, s: &ty::TraitRef) {
mywrite!(w, "{}|", (cx.ds)(s.def_id));
enc_substs(w, cx, &s.substs);
}
pub fn enc_trait_store(w: &mut MemWriter, cx: @ctxt, s: ty::TraitStore) {
match s {
ty::UniqTraitStore => mywrite!(w, "~"),
ty::BoxTraitStore => mywrite!(w, "@"),
ty::RegionTraitStore(re) => {
mywrite!(w, "&");
enc_region(w, cx, re);
}
}
}
fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
match *st {
ty::ty_nil => mywrite!(w, "n"),
ty::ty_bot => mywrite!(w, "z"),
ty::ty_bool => mywrite!(w, "b"),
ty::ty_char => mywrite!(w, "c"),
ty::ty_int(t) => {
match t {
TyI => mywrite!(w, "i"),
TyI8 => mywrite!(w, "MB"),
TyI16 => mywrite!(w, "MW"),
TyI32 => mywrite!(w, "ML"),
TyI64 => mywrite!(w, "MD")
}
}
ty::ty_uint(t) => {
match t {
TyU => mywrite!(w, "u"),
TyU8 => mywrite!(w, "Mb"),
TyU16 => mywrite!(w, "Mw"),
TyU32 => mywrite!(w, "Ml"),
TyU64 => mywrite!(w, "Md")
}
}
ty::ty_float(t) => {
match t {
TyF32 => mywrite!(w, "Mf"),
TyF64 => mywrite!(w, "MF"),
}
}
ty::ty_enum(def, ref substs) => {
mywrite!(w, "t[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_trait(def, ref substs, store, mt, bounds) => {
mywrite!(w, "x[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
enc_trait_store(w, cx, store);
enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
mywrite!(w, "]");
}
ty::ty_tup(ref ts) => {
mywrite!(w, "T[");
for t in ts.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
ty::ty_box(typ) => { mywrite!(w, "@"); enc_ty(w, cx, typ); }
ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); }
ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); }
ty::ty_rptr(r, mt) => {
mywrite!(w, "&");
enc_region(w, cx, r);
enc_mt(w, cx, mt);
}
ty::ty_vec(mt, v) => {
mywrite!(w, "V");
enc_mt(w, cx, mt);
enc_vstore(w, cx, v);
}
ty::ty_str(v) => {
mywrite!(w, "v");
enc_vstore(w, cx, v);
}
ty::ty_unboxed_vec(mt) => { mywrite!(w, "U"); enc_mt(w, cx, mt); }
ty::ty_closure(ref f) => {
mywrite!(w, "f");
enc_closure_ty(w, cx, f);
}
ty::ty_bare_fn(ref f) => {
mywrite!(w, "F");
enc_bare_fn_ty(w, cx, f);
}
ty::ty_infer(_) => {
cx.diag.handler().bug("Cannot encode inference variable types");
}
ty::ty_param(param_ty {idx: id, def_id: did}) => {
mywrite!(w, "p{}|{}", (cx.ds)(did), id);
}
ty::ty_self(did) => {
mywrite!(w, "s{}|", (cx.ds)(did));
}
ty::ty_type => mywrite!(w, "Y"),
ty::ty_opaque_closure_ptr(p) => {
mywrite!(w, "C&");
enc_sigil(w, p);
}
ty::ty_struct(def, ref substs) => {
mywrite!(w, "a[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_err => fail!("Shouldn't encode error type")
}
}
fn enc_sigil(w: &mut MemWriter, sigil: Sigil) {
match sigil {
ManagedSigil => mywrite!(w, "@"),
OwnedSigil => mywrite!(w, "~"),
BorrowedSigil => mywrite!(w, "&"),
}
}
fn enc_purity(w: &mut MemWriter, p: Purity) {
match p {
ImpureFn => mywrite!(w, "i"),
UnsafeFn => mywrite!(w, "u"),
ExternFn => mywrite!(w, "c")
}
}
fn enc_abi_set(w: &mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
});
mywrite!(w, "]")
}
fn enc_onceness(w: &mut MemWriter, o: Onceness) {
match o {
Once => mywrite!(w, "o"),
Many => mywrite!(w, "m")
}
}
pub fn enc_bare_fn_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::BareFnTy) {
enc_purity(w, ft.purity);
enc_abi_set(w, ft.abis);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_closure_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::ClosureTy) {
enc_sigil(w, ft.sigil);
enc_purity(w, ft.purity);
enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig);
}
fn enc_fn_sig(w: &mut MemWriter, cx: @ctxt, fsig: &ty::FnSig) {
mywrite!(w, "[{}|", fsig.binder_id);
for ty in fsig.inputs.iter() {
enc_ty(w, cx, *ty);
}
mywrite!(w, "]");
if fsig.variadic {
mywrite!(w, "V");
} else {
mywrite!(w, "N");
}
enc_ty(w, cx, fsig.output);
}
fn enc_bounds(w: &mut MemWriter, cx: @ctxt, bs: &ty::ParamBounds) {
for bound in bs.builtin_bounds.iter() {
match bound {
ty::BoundSend => mywrite!(w, "S"),
ty::BoundFreeze => mywrite!(w, "K"),
ty::BoundStatic => mywrite!(w, "O"),
ty::BoundSized => mywrite!(w, "Z"),
ty::BoundPod => mywrite!(w, "P"),
}
}
for &tp in bs.trait_bounds.iter() {
mywrite!(w, "I");
enc_trait_ref(w, cx, tp);
}
mywrite!(w, ".");
}
pub fn enc_type_param_def(w: &mut MemWriter, cx: @ctxt, v: &ty::TypeParameterDef) {
mywrite!(w, "{}:{}|", cx.tcx.sess.str_of(v.ident), (cx.ds)(v.def_id));
enc_bounds(w, cx, v.bounds);
}
| {
mywrite!(w, "[{}|{}]",
(cx.ds)(d),
cx.tcx.sess.str_of(s));
} | conditional_block |
tyencode.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type encoding
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::io;
use std::io::MemWriter;
use std::str;
use std::fmt;
use middle::ty::param_ty;
use middle::ty;
use syntax::abi::AbiSet;
use syntax::ast;
use syntax::ast::*;
use syntax::diagnostic::SpanHandler;
use syntax::print::pprust::*;
// NOTE(review): this file is pre-1.0 Rust (`@`-pointers, `~str`, `MemWriter`);
// the code is kept byte-identical and only annotated.
// Formatting shim: lets callers write `mywrite!(w, "fmt", args...)`; expands to
// `format_args!` driving the `mywrite` function defined at the bottom of this group.
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
format_args!(|a| { mywrite($wr, a) }, $($arg)*)
) )
// Shared state threaded through every `enc_*` function.
pub struct ctxt {
// Error/bug reporting handle (used when an unencodable value is seen).
diag: @SpanHandler,
// Def -> str Callback:
ds: extern "Rust" fn(DefId) -> ~str,
// The type context.
tcx: ty::ctxt,
// Whether (and where) to record abbreviations for repeated types.
abbrevs: abbrev_ctxt
}
// Compact string representation for ty.t values. API ty_str & parse_from_str.
// Extra parameters are for converting to/from def_ids in the string rep.
// Whatever format you choose should not contain pipe characters.
// One recorded abbreviation: byte offset + length of a previously written
// encoding, plus the `#pos:len#` back-reference string to emit instead.
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @str
}
// Abbreviation policy: either disabled, or backed by a shared mutable table
// keyed by the interned type.
pub enum abbrev_ctxt {
ac_no_abbrevs,
ac_use_abbrevs(@RefCell<HashMap<ty::t, ty_abbrev>>),
}
// Sink for the `mywrite!` macro above: renders the format arguments into `w`.
fn mywrite(w: &mut MemWriter, fmt: &fmt::Arguments) {
fmt::write(&mut *w as &mut io::Writer, fmt);
}
// Encodes one interned type `t` into `w`, either verbatim (with a per-type
// string cache) or via the abbreviation table, depending on `cx.abbrevs`.
pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
match cx.abbrevs {
ac_no_abbrevs => {
// No abbreviations: render the type once into a scratch buffer and
// memoize the rendered string in `tcx.short_names_cache`, keyed by `t`.
let result_str_opt;
{
let short_names_cache = cx.tcx.short_names_cache.borrow();
result_str_opt = short_names_cache.get()
.find(&t)
.map(|result| *result);
}
let result_str = match result_str_opt {
Some(s) => s,
None => {
// Cache miss: encode into a temporary writer, stash the bytes.
let wr = &mut MemWriter::new();
enc_sty(wr, cx, &ty::get(t).sty);
let s = str::from_utf8(wr.get_ref()).to_managed();
let mut short_names_cache = cx.tcx
.short_names_cache
.borrow_mut();
short_names_cache.get().insert(t, s);
s
}
};
w.write(result_str.as_bytes());
}
ac_use_abbrevs(abbrevs) => {
// If this type was written before, emit its recorded `#pos:len#`
// back-reference instead of re-encoding it.
{
let mut abbrevs = abbrevs.borrow_mut();
match abbrevs.get().find(&t) {
Some(a) => { w.write(a.s.as_bytes()); return; }
None => {}
}
}
// First occurrence: encode in place and measure how long it was.
let pos = w.tell();
enc_sty(w, cx, &ty::get(t).sty);
let end = w.tell();
let len = end - pos;
// Number of hex digits needed to print `u` (used to size `#pos:len#`).
fn estimate_sz(u: u64) -> u64 {
let mut n = u;
let mut len = 0;
while n!= 0 { len += 1; n = n >> 4; }
return len;
}
// Only record an abbreviation when the back-reference is actually
// shorter than the encoding it would replace.
let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let a = ty_abbrev { pos: pos as uint,
len: len as uint,
s: s };
{
let mut abbrevs = abbrevs.borrow_mut();
abbrevs.get().insert(t, a);
}
}
return;
}
}
}
// Writes "m" for mutable; immutable is the implicit default (writes nothing).
fn enc_mutability(w: &mut MemWriter, mt: ast::Mutability) {
match mt {
MutImmutable => (),
MutMutable => mywrite!(w, "m"),
}
}
// Encodes a mutability-qualified type: optional "m" prefix, then the type.
fn enc_mt(w: &mut MemWriter, cx: @ctxt, mt: ty::mt) {
enc_mutability(w, mt.mutbl);
enc_ty(w, cx, mt.ty);
}
// Encodes an Option: "n" for None, "s" + payload (via `enc_f`) for Some.
fn enc_opt<T>(w: &mut MemWriter, t: Option<T>, enc_f: |&mut MemWriter, T|) {
match t {
None => mywrite!(w, "n"),
Some(v) => {
mywrite!(w, "s");
enc_f(w, v);
}
}
}
pub fn enc_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::substs) |
// Encodes the region part of a substitution: "e" when regions are erased,
// otherwise "n" + each region + "." terminator.
fn enc_region_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::RegionSubsts) {
match *substs {
ty::ErasedRegions => {
mywrite!(w, "e");
}
ty::NonerasedRegions(ref regions) => {
mywrite!(w, "n");
for &r in regions.iter() {
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
}
}
// Encodes a region with a single-character tag per variant:
// b = late-bound, B = early-bound, f = free, s = scope, t = 'static, e = empty.
// Inference regions are a bug at this stage and abort via the diagnostics handler.
fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
match r {
ty::ReLateBound(id, br) => {
mywrite!(w, "b[{}|", id);
enc_bound_region(w, cx, br);
mywrite!(w, "]");
}
ty::ReEarlyBound(node_id, index, ident) => {
mywrite!(w, "B[{}|{}|{}]",
node_id,
index,
cx.tcx.sess.str_of(ident));
}
ty::ReFree(ref fr) => {
mywrite!(w, "f[{}|", fr.scope_id);
enc_bound_region(w, cx, fr.bound_region);
mywrite!(w, "]");
}
ty::ReScope(nid) => {
mywrite!(w, "s{}|", nid);
}
ty::ReStatic => {
mywrite!(w, "t");
}
ty::ReEmpty => {
mywrite!(w, "e");
}
ty::ReInfer(_) => {
// these should not crop up after typeck
cx.diag.handler().bug("Cannot encode region variables");
}
}
}
// Encodes a bound region: a = anonymous (by index), bracketed pair for a
// named region (def-id via `cx.ds` plus the identifier), f = fresh (by id).
fn enc_bound_region(w: &mut MemWriter, cx: @ctxt, br: ty::BoundRegion) {
match br {
ty::BrAnon(idx) => {
mywrite!(w, "a{}|", idx);
}
ty::BrNamed(d, s) => {
mywrite!(w, "[{}|{}]",
(cx.ds)(d),
cx.tcx.sess.str_of(s));
}
ty::BrFresh(id) => {
mywrite!(w, "f{}|", id);
}
}
}
// Encodes a vector/string store kind after a "/" separator:
// fixed length, ~ (unique), @ (boxed), or & + region for a slice.
pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) {
mywrite!(w, "/");
match v {
ty::vstore_fixed(u) => mywrite!(w, "{}|", u),
ty::vstore_uniq => mywrite!(w, "~"),
ty::vstore_box => mywrite!(w, "@"),
ty::vstore_slice(r) => {
mywrite!(w, "&");
enc_region(w, cx, r);
}
}
}
// Encodes a trait reference: the trait's def-id string, then its substitutions.
pub fn enc_trait_ref(w: &mut MemWriter, cx: @ctxt, s: &ty::TraitRef) {
mywrite!(w, "{}|", (cx.ds)(s.def_id));
enc_substs(w, cx, &s.substs);
}
// Encodes how a trait object is stored: ~ (unique), @ (boxed), or & + region.
pub fn enc_trait_store(w: &mut MemWriter, cx: @ctxt, s: ty::TraitStore) {
match s {
ty::UniqTraitStore => mywrite!(w, "~"),
ty::BoxTraitStore => mywrite!(w, "@"),
ty::RegionTraitStore(re) => {
mywrite!(w, "&");
enc_region(w, cx, re);
}
}
}
// Encodes the structural part of a type (`ty::sty`) as a compact tagged string.
// Each arm writes a short tag (the literals in the `mywrite!` calls) followed
// by any payload; nominal types write their def-id via `(cx.ds)` plus substs
// inside brackets. Inference variables and the error type must never reach
// this encoder and abort.
fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
match *st {
ty::ty_nil => mywrite!(w, "n"),
ty::ty_bot => mywrite!(w, "z"),
ty::ty_bool => mywrite!(w, "b"),
ty::ty_char => mywrite!(w, "c"),
// Machine integer types get an "M" prefix; plain int/uint are bare tags.
ty::ty_int(t) => {
match t {
TyI => mywrite!(w, "i"),
TyI8 => mywrite!(w, "MB"),
TyI16 => mywrite!(w, "MW"),
TyI32 => mywrite!(w, "ML"),
TyI64 => mywrite!(w, "MD")
}
}
ty::ty_uint(t) => {
match t {
TyU => mywrite!(w, "u"),
TyU8 => mywrite!(w, "Mb"),
TyU16 => mywrite!(w, "Mw"),
TyU32 => mywrite!(w, "Ml"),
TyU64 => mywrite!(w, "Md")
}
}
ty::ty_float(t) => {
match t {
TyF32 => mywrite!(w, "Mf"),
TyF64 => mywrite!(w, "MF"),
}
}
ty::ty_enum(def, ref substs) => {
mywrite!(w, "t[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
// Trait objects carry def-id, substs, store kind, mutability and
// builtin bounds (trait bounds are always empty here).
ty::ty_trait(def, ref substs, store, mt, bounds) => {
mywrite!(w, "x[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
enc_trait_store(w, cx, store);
enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
mywrite!(w, "]");
}
ty::ty_tup(ref ts) => {
mywrite!(w, "T[");
for t in ts.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
ty::ty_box(typ) => { mywrite!(w, "@"); enc_ty(w, cx, typ); }
ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); }
ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); }
ty::ty_rptr(r, mt) => {
mywrite!(w, "&");
enc_region(w, cx, r);
enc_mt(w, cx, mt);
}
ty::ty_vec(mt, v) => {
mywrite!(w, "V");
enc_mt(w, cx, mt);
enc_vstore(w, cx, v);
}
ty::ty_str(v) => {
mywrite!(w, "v");
enc_vstore(w, cx, v);
}
ty::ty_unboxed_vec(mt) => { mywrite!(w, "U"); enc_mt(w, cx, mt); }
ty::ty_closure(ref f) => {
mywrite!(w, "f");
enc_closure_ty(w, cx, f);
}
ty::ty_bare_fn(ref f) => {
mywrite!(w, "F");
enc_bare_fn_ty(w, cx, f);
}
ty::ty_infer(_) => {
cx.diag.handler().bug("Cannot encode inference variable types");
}
ty::ty_param(param_ty {idx: id, def_id: did}) => {
mywrite!(w, "p{}|{}", (cx.ds)(did), id);
}
ty::ty_self(did) => {
mywrite!(w, "s{}|", (cx.ds)(did));
}
ty::ty_type => mywrite!(w, "Y"),
ty::ty_opaque_closure_ptr(p) => {
mywrite!(w, "C&");
enc_sigil(w, p);
}
ty::ty_struct(def, ref substs) => {
mywrite!(w, "a[{}|", (cx.ds)(def));
enc_substs(w, cx, substs);
mywrite!(w, "]");
}
ty::ty_err => fail!("Shouldn't encode error type")
}
}
// Encodes a closure sigil: @ (managed), ~ (owned), & (borrowed).
fn enc_sigil(w: &mut MemWriter, sigil: Sigil) {
match sigil {
ManagedSigil => mywrite!(w, "@"),
OwnedSigil => mywrite!(w, "~"),
BorrowedSigil => mywrite!(w, "&"),
}
}
// Encodes function purity: i = impure, u = unsafe, c = extern.
fn enc_purity(w: &mut MemWriter, p: Purity) {
match p {
ImpureFn => mywrite!(w, "i"),
UnsafeFn => mywrite!(w, "u"),
ExternFn => mywrite!(w, "c")
}
}
// Encodes an ABI set as a bracketed, comma-terminated list of ABI names.
fn enc_abi_set(w: &mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
});
mywrite!(w, "]")
}
// Encodes closure onceness: o = once, m = many.
fn enc_onceness(w: &mut MemWriter, o: Onceness) {
match o {
Once => mywrite!(w, "o"),
Many => mywrite!(w, "m")
}
}
// Encodes a bare function type: purity, ABI set, then the signature.
pub fn enc_bare_fn_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::BareFnTy) {
enc_purity(w, ft.purity);
enc_abi_set(w, ft.abis);
enc_fn_sig(w, cx, &ft.sig);
}
// Encodes a closure type: sigil, purity, onceness, environment region,
// builtin bounds (trait bounds always empty here), then the signature.
fn enc_closure_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::ClosureTy) {
enc_sigil(w, ft.sigil);
enc_purity(w, ft.purity);
enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
trait_bounds: ~[]};
enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig);
}
// Encodes a function signature: "[binder|" + inputs + "]", a V/N variadic
// flag, then the output type.
fn enc_fn_sig(w: &mut MemWriter, cx: @ctxt, fsig: &ty::FnSig) {
mywrite!(w, "[{}|", fsig.binder_id);
for ty in fsig.inputs.iter() {
enc_ty(w, cx, *ty);
}
mywrite!(w, "]");
if fsig.variadic {
mywrite!(w, "V");
} else {
mywrite!(w, "N");
}
enc_ty(w, cx, fsig.output);
}
// Encodes parameter bounds: one tag per builtin bound (S/K/O/Z/P), then
// "I" + trait-ref for each trait bound, terminated by ".".
fn enc_bounds(w: &mut MemWriter, cx: @ctxt, bs: &ty::ParamBounds) {
for bound in bs.builtin_bounds.iter() {
match bound {
ty::BoundSend => mywrite!(w, "S"),
ty::BoundFreeze => mywrite!(w, "K"),
ty::BoundStatic => mywrite!(w, "O"),
ty::BoundSized => mywrite!(w, "Z"),
ty::BoundPod => mywrite!(w, "P"),
}
}
for &tp in bs.trait_bounds.iter() {
mywrite!(w, "I");
enc_trait_ref(w, cx, tp);
}
mywrite!(w, ".");
}
// Encodes a type parameter definition: "ident:def_id|" then its bounds.
pub fn enc_type_param_def(w: &mut MemWriter, cx: @ctxt, v: &ty::TypeParameterDef) {
mywrite!(w, "{}:{}|", cx.tcx.sess.str_of(v.ident), (cx.ds)(v.def_id));
enc_bounds(w, cx, v.bounds);
}
| {
enc_region_substs(w, cx, &substs.regions);
enc_opt(w, substs.self_ty, |w, t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
} | identifier_body |
id.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use safe_nd::{AppFullId, ClientFullId, PublicId, PublicKey, Signature};
use std::sync::Arc;
/// An enum representing the Full Id variants for a Client or App.
///
/// Cloning is cheap: each variant stores its full id behind an `Arc`,
/// so `Clone` only bumps a reference count.
#[derive(Clone)]
pub enum SafeKey {
/// Represents an application authorised by a client.
App(Arc<AppFullId>),
/// Represents a network client.
Client(Arc<ClientFullId>),
}
impl SafeKey {
/// Creates a client full ID.
pub fn | (full_id: ClientFullId) -> Self {
Self::Client(Arc::new(full_id))
}
/// Creates an app full ID.
pub fn app(full_id: AppFullId) -> Self {
Self::App(Arc::new(full_id))
}
/// Signs a given message using the App / Client full id as required.
pub fn sign(&self, msg: &[u8]) -> Signature {
match self {
Self::App(app_full_id) => app_full_id.sign(msg),
Self::Client(client_full_id) => client_full_id.sign(msg),
}
}
/// Returns a corresponding public ID.
pub fn public_id(&self) -> PublicId {
match self {
Self::App(app_full_id) => PublicId::App(app_full_id.public_id().clone()),
Self::Client(client_full_id) => PublicId::Client(client_full_id.public_id().clone()),
}
}
/// Returns a corresponding public key.
pub fn public_key(&self) -> PublicKey {
match self {
Self::App(app_full_id) => *app_full_id.public_id().public_key(),
Self::Client(client_full_id) => *client_full_id.public_id().public_key(),
}
}
}
| client | identifier_name |
id.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use safe_nd::{AppFullId, ClientFullId, PublicId, PublicKey, Signature};
use std::sync::Arc;
/// An enum representing the Full Id variants for a Client or App.
#[derive(Clone)]
pub enum SafeKey {
/// Represents an application authorised by a client.
App(Arc<AppFullId>),
/// Represents a network client.
Client(Arc<ClientFullId>),
}
impl SafeKey {
/// Creates a client full ID.
pub fn client(full_id: ClientFullId) -> Self {
Self::Client(Arc::new(full_id))
}
/// Creates an app full ID.
pub fn app(full_id: AppFullId) -> Self {
Self::App(Arc::new(full_id))
}
/// Signs a given message using the App / Client full id as required.
pub fn sign(&self, msg: &[u8]) -> Signature { | }
}
/// Returns a corresponding public ID.
pub fn public_id(&self) -> PublicId {
match self {
Self::App(app_full_id) => PublicId::App(app_full_id.public_id().clone()),
Self::Client(client_full_id) => PublicId::Client(client_full_id.public_id().clone()),
}
}
/// Returns a corresponding public key.
pub fn public_key(&self) -> PublicKey {
match self {
Self::App(app_full_id) => *app_full_id.public_id().public_key(),
Self::Client(client_full_id) => *client_full_id.public_id().public_key(),
}
}
} | match self {
Self::App(app_full_id) => app_full_id.sign(msg),
Self::Client(client_full_id) => client_full_id.sign(msg), | random_line_split |
id.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use safe_nd::{AppFullId, ClientFullId, PublicId, PublicKey, Signature};
use std::sync::Arc;
/// An enum representing the Full Id variants for a Client or App.
#[derive(Clone)]
pub enum SafeKey {
/// Represents an application authorised by a client.
App(Arc<AppFullId>),
/// Represents a network client.
Client(Arc<ClientFullId>),
}
impl SafeKey {
/// Creates a client full ID.
pub fn client(full_id: ClientFullId) -> Self |
/// Creates an app full ID.
pub fn app(full_id: AppFullId) -> Self {
Self::App(Arc::new(full_id))
}
/// Signs a given message using the App / Client full id as required.
pub fn sign(&self, msg: &[u8]) -> Signature {
match self {
Self::App(app_full_id) => app_full_id.sign(msg),
Self::Client(client_full_id) => client_full_id.sign(msg),
}
}
/// Returns a corresponding public ID.
pub fn public_id(&self) -> PublicId {
match self {
Self::App(app_full_id) => PublicId::App(app_full_id.public_id().clone()),
Self::Client(client_full_id) => PublicId::Client(client_full_id.public_id().clone()),
}
}
/// Returns a corresponding public key.
pub fn public_key(&self) -> PublicKey {
match self {
Self::App(app_full_id) => *app_full_id.public_id().public_key(),
Self::Client(client_full_id) => *client_full_id.public_id().public_key(),
}
}
}
| {
Self::Client(Arc::new(full_id))
} | identifier_body |
pkgid.rs | use cargo::ops;
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
#[derive(RustcDecodable)]
struct Options {
flag_verbose: bool,
flag_quiet: bool,
flag_manifest_path: Option<String>,
arg_spec: Option<String>,
}
pub const USAGE: &'static str = "
Print a fully qualified package specification
Usage:
cargo pkgid [options] [<spec>]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to the package to clean
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
Given a <spec> argument, print out the fully qualified package id specifier.
This command will generate an error if <spec> is ambiguous as to which package
it refers to in the dependency graph. If no <spec> is given, then the pkgid for
the local package is printed.
This command requires that a lockfile is available and dependencies have been
fetched.
Example Package IDs
pkgid | name | version | url
|-----------------------------|--------|-----------|---------------------|
foo | foo | * | *
foo:1.2.3 | foo | 1.2.3 | *
crates.io/foo | foo | * | *://crates.io/foo
crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo
crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar
http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo
";
pub fn | (options: Options,
config: &Config) -> CliResult<Option<()>> {
try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path.clone()));
let spec = options.arg_spec.as_ref().map(|s| &s[..]);
let spec = try!(ops::pkgid(&root, spec, config).map_err(|err| {
CliError::from_boxed(err, 101)
}));
println!("{}", spec);
Ok(None)
}
| execute | identifier_name |
pkgid.rs | use cargo::ops;
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
// Command-line options for `cargo pkgid`, decoded from the docopt USAGE
// string below (field names mirror the flag/argument names).
#[derive(RustcDecodable)]
struct Options {
flag_verbose: bool,
flag_quiet: bool,
// Optional --manifest-path override.
flag_manifest_path: Option<String>,
// Optional positional <spec> to resolve; None means the local package.
arg_spec: Option<String>,
}
pub const USAGE: &'static str = "
Print a fully qualified package specification
Usage:
cargo pkgid [options] [<spec>]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to the package to clean
-v, --verbose Use verbose output
-q, --quiet No output printed to stdout
Given a <spec> argument, print out the fully qualified package id specifier.
This command will generate an error if <spec> is ambiguous as to which package
it refers to in the dependency graph. If no <spec> is given, then the pkgid for
the local package is printed.
This command requires that a lockfile is available and dependencies have been
fetched.
Example Package IDs
pkgid | name | version | url
|-----------------------------|--------|-----------|---------------------|
foo | foo | * | * | crates.io/foo | foo | * | *://crates.io/foo
crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo
crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar
http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo
";
// Entry point for `cargo pkgid`: resolves the optional <spec> argument to a
// fully qualified package id specifier and prints it.
pub fn execute(options: Options,
config: &Config) -> CliResult<Option<()>> {
// Honour --verbose / --quiet before doing any work.
try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
// Locate the manifest (explicit --manifest-path or search from the cwd).
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path.clone()));
let spec = options.arg_spec.as_ref().map(|s| &s[..]);
// Ambiguous or unknown specs surface as exit code 101.
let spec = try!(ops::pkgid(&root, spec, config).map_err(|err| {
CliError::from_boxed(err, 101)
}));
println!("{}", spec);
Ok(None)
}
question_6.rs | pub fn compress(string: &str) -> String {
let mut character_count = 0;
let mut previous_char = string.chars().nth(0).unwrap(); // Starts at first char
let mut new_string_parts: Vec<String> = vec![];
for c in string.chars() {
if previous_char == c {
character_count = character_count + 1;
} else {
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
character_count = 1;
}
previous_char = c;
}
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
let new_string = new_string_parts.join("");
if string.len() <= new_string.len() {
return string.to_string();
} else |
}
#[test]
fn example_compress() {
assert_eq!(compress("aabcccccaaa"), "a2b1c5a3");
}
#[test]
fn compress_should_return_original_string_when_not_smaller() {
assert_eq!(compress("aa"), "aa");
}
#[test]
fn compress_should_return_original_string_when_not_smaller_with_larger_example() {
assert_eq!(compress("aabbccddeeffgg"), "aabbccddeeffgg");
}
#[test]
fn compress_should_return_original_string_when_compression_generates_larger_string() {
// if compress() had its way "abcdee" would be "a1b1c1d1e2"
assert_eq!(compress("abcdee"), "abcdee");
}
| {
return new_string_parts.join("");
} | conditional_block |
question_6.rs | pub fn compress(string: &str) -> String {
let mut character_count = 0;
let mut previous_char = string.chars().nth(0).unwrap(); // Starts at first char
let mut new_string_parts: Vec<String> = vec![];
for c in string.chars() {
if previous_char == c {
character_count = character_count + 1;
} else {
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
character_count = 1;
}
previous_char = c;
}
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
let new_string = new_string_parts.join("");
if string.len() <= new_string.len() {
return string.to_string();
} else {
return new_string_parts.join("");
}
}
#[test]
// Worked example from the exercise statement: runs are emitted as <char><count>.
fn example_compress() {
assert_eq!(compress("aabcccccaaa"), "a2b1c5a3");
}
#[test]
fn | () {
assert_eq!(compress("aa"), "aa");
}
#[test]
// Every run has length 2, so the encoding ("a2b2...") is the same length as
// the input and the original must be returned.
fn compress_should_return_original_string_when_not_smaller_with_larger_example() {
assert_eq!(compress("aabbccddeeffgg"), "aabbccddeeffgg");
}
#[test]
// All-singleton runs make the encoding longer than the input.
fn compress_should_return_original_string_when_compression_generates_larger_string() {
// if compress() had its way "abcdee" would be "a1b1c1d1e2"
assert_eq!(compress("abcdee"), "abcdee");
}
| compress_should_return_original_string_when_not_smaller | identifier_name |
question_6.rs | pub fn compress(string: &str) -> String {
let mut character_count = 0;
let mut previous_char = string.chars().nth(0).unwrap(); // Starts at first char
let mut new_string_parts: Vec<String> = vec![];
for c in string.chars() {
if previous_char == c {
character_count = character_count + 1;
} else {
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
character_count = 1; | previous_char = c;
}
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
let new_string = new_string_parts.join("");
if string.len() <= new_string.len() {
return string.to_string();
} else {
return new_string_parts.join("");
}
}
#[test]
fn example_compress() {
assert_eq!(compress("aabcccccaaa"), "a2b1c5a3");
}
#[test]
fn compress_should_return_original_string_when_not_smaller() {
assert_eq!(compress("aa"), "aa");
}
#[test]
fn compress_should_return_original_string_when_not_smaller_with_larger_example() {
assert_eq!(compress("aabbccddeeffgg"), "aabbccddeeffgg");
}
#[test]
fn compress_should_return_original_string_when_compression_generates_larger_string() {
// if compress() had its way "abcdee" would be "a1b1c1d1e2"
assert_eq!(compress("abcdee"), "abcdee");
} | } | random_line_split |
question_6.rs | pub fn compress(string: &str) -> String {
let mut character_count = 0;
let mut previous_char = string.chars().nth(0).unwrap(); // Starts at first char
let mut new_string_parts: Vec<String> = vec![];
for c in string.chars() {
if previous_char == c {
character_count = character_count + 1;
} else {
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
character_count = 1;
}
previous_char = c;
}
new_string_parts.push(previous_char.to_string());
new_string_parts.push(character_count.to_string());
let new_string = new_string_parts.join("");
if string.len() <= new_string.len() {
return string.to_string();
} else {
return new_string_parts.join("");
}
}
#[test]
fn example_compress() |
#[test]
fn compress_should_return_original_string_when_not_smaller() {
assert_eq!(compress("aa"), "aa");
}
#[test]
fn compress_should_return_original_string_when_not_smaller_with_larger_example() {
assert_eq!(compress("aabbccddeeffgg"), "aabbccddeeffgg");
}
#[test]
fn compress_should_return_original_string_when_compression_generates_larger_string() {
// if compress() had its way "abcdee" would be "a1b1c1d1e2"
assert_eq!(compress("abcdee"), "abcdee");
}
| {
assert_eq!(compress("aabcccccaaa"), "a2b1c5a3");
} | identifier_body |
type_name.rs | #![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::type_name;
// pub fn type_name<T>() -> usize;
// Asserts that the (unstable, historical-signature) `type_name` intrinsic
// renders type `$T` exactly as `$message`.
macro_rules! type_name_test {
($T:ty, $message:expr) => ({
// The intrinsic is unsafe to call in this API generation.
let message: &'static str = unsafe { type_name::<$T>() };
assert_eq!(message, $message);
})
}
#[test]
fn | () {
type_name_test!( u8, "u8" );
type_name_test!( u16, "u16" );
type_name_test!( u32, "u32" );
type_name_test!( u64, "u64" );
type_name_test!( i8, "i8" );
type_name_test!( i16, "i16" );
type_name_test!( i32, "i32" );
type_name_test!( i64, "i64" );
type_name_test!( f32, "f32" );
type_name_test!( f64, "f64" );
type_name_test!( [u8; 0], "[u8; 0]" );
type_name_test!( [u8; 68], "[u8; 68]" );
type_name_test!( [u32; 0], "[u32; 0]" );
type_name_test!( [u32; 68], "[u32; 68]" );
type_name_test!( (u8,), "(u8,)" );
type_name_test!( (u8, u16), "(u8, u16)" );
type_name_test!( (u8, u16, u32), "(u8, u16, u32)" );
type_name_test!( (u8, u16, u32, u64), "(u8, u16, u32, u64)" );
}
}
| type_name_test1 | identifier_name |
type_name.rs | #![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::type_name;
// pub fn type_name<T>() -> usize;
macro_rules! type_name_test {
($T:ty, $message:expr) => ({
let message: &'static str = unsafe { type_name::<$T>() };
assert_eq!(message, $message);
})
}
#[test]
fn type_name_test1() | type_name_test!( (u8, u16), "(u8, u16)" );
type_name_test!( (u8, u16, u32), "(u8, u16, u32)" );
type_name_test!( (u8, u16, u32, u64), "(u8, u16, u32, u64)" );
}
}
| {
type_name_test!( u8, "u8" );
type_name_test!( u16, "u16" );
type_name_test!( u32, "u32" );
type_name_test!( u64, "u64" );
type_name_test!( i8, "i8" );
type_name_test!( i16, "i16" );
type_name_test!( i32, "i32" );
type_name_test!( i64, "i64" );
type_name_test!( f32, "f32" );
type_name_test!( f64, "f64" );
type_name_test!( [u8; 0], "[u8; 0]" );
type_name_test!( [u8; 68], "[u8; 68]" );
type_name_test!( [u32; 0], "[u32; 0]" );
type_name_test!( [u32; 68], "[u32; 68]" );
type_name_test!( (u8,), "(u8,)" ); | identifier_body |
type_name.rs | #![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::type_name;
// pub fn type_name<T>() -> usize;
macro_rules! type_name_test {
($T:ty, $message:expr) => ({
let message: &'static str = unsafe { type_name::<$T>() };
assert_eq!(message, $message);
})
}
#[test]
fn type_name_test1() { | type_name_test!( i8, "i8" );
type_name_test!( i16, "i16" );
type_name_test!( i32, "i32" );
type_name_test!( i64, "i64" );
type_name_test!( f32, "f32" );
type_name_test!( f64, "f64" );
type_name_test!( [u8; 0], "[u8; 0]" );
type_name_test!( [u8; 68], "[u8; 68]" );
type_name_test!( [u32; 0], "[u32; 0]" );
type_name_test!( [u32; 68], "[u32; 68]" );
type_name_test!( (u8,), "(u8,)" );
type_name_test!( (u8, u16), "(u8, u16)" );
type_name_test!( (u8, u16, u32), "(u8, u16, u32)" );
type_name_test!( (u8, u16, u32, u64), "(u8, u16, u32, u64)" );
}
} | type_name_test!( u8, "u8" );
type_name_test!( u16, "u16" );
type_name_test!( u32, "u32" );
type_name_test!( u64, "u64" ); | random_line_split |
main.rs | // Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern mod glfw;
use std::libc;
use std::unstable::finally::Finally;
// Custom runtime entry point (pre-1.0 Rust): forwards to `main` pinned to the
// main OS thread, which the comment below notes GLFW requires.
#[start]
fn start(argc: int, argv: **u8, crate_map: *u8) -> int {
// GLFW must run on the main platform thread
std::rt::start_on_main_thread(argc, argv, crate_map, main)
}
// Initializes GLFW, opens a 300x300 window, and runs the event loop until the
// window is asked to close. NOTE(review): pre-1.0 dialect (`fail!`, `~str`,
// `finally`) kept byte-identical.
fn main() {
glfw::set_error_callback(error_callback);
if glfw::init().is_err() {
fail!(~"Failed to initialize GLFW");
} else {
(||{
let window = glfw::Window::create(300, 300, "Hello this is window", glfw::Windowed).unwrap();
window.set_key_callback(key_callback);
window.make_context_current();
// Pump both window events and global GLFW events each iteration.
while!window.should_close() {
window.poll_events();
glfw::poll_events();
}
// Use `finally` to ensure that `glfw::terminate` is called even if a failure occurs
}).finally(glfw::terminate);
}
}
// Key handler: pressing Escape requests window close (picked up by the
// `should_close` check in the main loop).
fn key_callback(window: &glfw::Window, key: libc::c_int, _: libc::c_int, action: libc::c_int, _: glfw::KeyMods) {
if action == glfw::PRESS && key == glfw::KEY_ESCAPE {
window.set_should_close(true);
}
}
fn | (_: libc::c_int, description: ~str) {
println(fmt!("GLFW Error: %s", description));
}
| error_callback | identifier_name |
main.rs | // Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern mod glfw;
use std::libc;
use std::unstable::finally::Finally;
#[start]
fn start(argc: int, argv: **u8, crate_map: *u8) -> int {
// GLFW must run on the main platform thread
std::rt::start_on_main_thread(argc, argv, crate_map, main)
}
fn main() |
fn key_callback(window: &glfw::Window, key: libc::c_int, _: libc::c_int, action: libc::c_int, _: glfw::KeyMods) {
if action == glfw::PRESS && key == glfw::KEY_ESCAPE {
window.set_should_close(true);
}
}
fn error_callback(_: libc::c_int, description: ~str) {
println(fmt!("GLFW Error: %s", description));
}
| {
glfw::set_error_callback(error_callback);
if glfw::init().is_err() {
fail!(~"Failed to initialize GLFW");
} else {
(||{
let window = glfw::Window::create(300, 300, "Hello this is window", glfw::Windowed).unwrap();
window.set_key_callback(key_callback);
window.make_context_current();
while !window.should_close() {
window.poll_events();
glfw::poll_events();
}
// Use `finally` to ensure that `glfw::terminate` is called even if a failure occurs
}).finally(glfw::terminate);
}
} | identifier_body |
main.rs | // Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern mod glfw;
use std::libc;
use std::unstable::finally::Finally;
#[start]
fn start(argc: int, argv: **u8, crate_map: *u8) -> int {
// GLFW must run on the main platform thread
std::rt::start_on_main_thread(argc, argv, crate_map, main)
}
fn main() {
glfw::set_error_callback(error_callback);
if glfw::init().is_err() {
fail!(~"Failed to initialize GLFW"); | } else {
(||{
let window = glfw::Window::create(300, 300, "Hello this is window", glfw::Windowed).unwrap();
window.set_key_callback(key_callback);
window.make_context_current();
while!window.should_close() {
window.poll_events();
glfw::poll_events();
}
// Use `finally` to ensure that `glfw::terminate` is called even if a failure occurs
}).finally(glfw::terminate);
}
}
fn key_callback(window: &glfw::Window, key: libc::c_int, _: libc::c_int, action: libc::c_int, _: glfw::KeyMods) {
if action == glfw::PRESS && key == glfw::KEY_ESCAPE {
window.set_should_close(true);
}
}
fn error_callback(_: libc::c_int, description: ~str) {
println(fmt!("GLFW Error: %s", description));
} | random_line_split |
|
main.rs | // Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern mod glfw;
use std::libc;
use std::unstable::finally::Finally;
#[start]
fn start(argc: int, argv: **u8, crate_map: *u8) -> int {
// GLFW must run on the main platform thread
std::rt::start_on_main_thread(argc, argv, crate_map, main)
}
fn main() {
glfw::set_error_callback(error_callback);
if glfw::init().is_err() | else {
(||{
let window = glfw::Window::create(300, 300, "Hello this is window", glfw::Windowed).unwrap();
window.set_key_callback(key_callback);
window.make_context_current();
while!window.should_close() {
window.poll_events();
glfw::poll_events();
}
// Use `finally` to ensure that `glfw::terminate` is called even if a failure occurs
}).finally(glfw::terminate);
}
}
fn key_callback(window: &glfw::Window, key: libc::c_int, _: libc::c_int, action: libc::c_int, _: glfw::KeyMods) {
if action == glfw::PRESS && key == glfw::KEY_ESCAPE {
window.set_should_close(true);
}
}
fn error_callback(_: libc::c_int, description: ~str) {
println(fmt!("GLFW Error: %s", description));
}
| {
fail!(~"Failed to initialize GLFW");
} | conditional_block |
annulus_distribution.rs | //! Implementation of a uniform distribuition of points on a two-dimensional
//! annulus.
use rand::distributions::Distribution;
use rand::Rng;
use std::f64::consts::PI;
pub use Point;
/// The uniform distribution of 2D points on an annulus `{x: r_1 <= |x| <= r_2}`.
pub struct AnnulusDist {
r1_sq: f64,
r2_sq: f64,
}
impl AnnulusDist {
/// Construct a new `AnnulusDist` with the given inner and outer radius
/// `r1`, `r2`. Panics if not `0 < r1 < r2`.
pub fn new(r1: f64, r2: f64) -> AnnulusDist {
assert!(0. < r1, "AnnulusDist::new called with `r1 <= 0`");
assert!(r1 < r2, "AnnulusDist::new called with `r2 <= r1`");
AnnulusDist { | }
impl Distribution<Point> for AnnulusDist {
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Point {
// For points to be uniformly distributed in the annulus, the area of the disk with radius
// equal to the distance of the point from the origin is distributed uniformly between r₁²
// and r₂².
let r = (self.r1_sq + rng.gen::<f64>() * (self.r2_sq - self.r1_sq)).sqrt();
// The angle is uniform between 0 and 2π.
let (y, x) = (2. * PI * rng.gen::<f64>()).sin_cos();
Point(r * x, r * y)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn all_in_annulus() {
let r1 = 12.;
let r2 = 58.;
assert!(
AnnulusDist::new(r1, r2)
.sample_iter(&mut ::rand::thread_rng())
.take(1000)
.all(|p| {
let d = p.dist(Point(0., 0.));
r1 <= d && d <= r2
})
);
}
} | r1_sq: r1 * r1,
r2_sq: r2 * r2,
}
} | random_line_split |
annulus_distribution.rs | //! Implementation of a uniform distribuition of points on a two-dimensional
//! annulus.
use rand::distributions::Distribution;
use rand::Rng;
use std::f64::consts::PI;
pub use Point;
/// The uniform distribution of 2D points on an annulus `{x: r_1 <= |x| <= r_2}`.
pub struct | {
r1_sq: f64,
r2_sq: f64,
}
impl AnnulusDist {
/// Construct a new `AnnulusDist` with the given inner and outer radius
/// `r1`, `r2`. Panics if not `0 < r1 < r2`.
pub fn new(r1: f64, r2: f64) -> AnnulusDist {
assert!(0. < r1, "AnnulusDist::new called with `r1 <= 0`");
assert!(r1 < r2, "AnnulusDist::new called with `r2 <= r1`");
AnnulusDist {
r1_sq: r1 * r1,
r2_sq: r2 * r2,
}
}
}
impl Distribution<Point> for AnnulusDist {
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Point {
// For points to be uniformly distributed in the annulus, the area of the disk with radius
// equal to the distance of the point from the origin is distributed uniformly between r₁²
// and r₂².
let r = (self.r1_sq + rng.gen::<f64>() * (self.r2_sq - self.r1_sq)).sqrt();
// The angle is uniform between 0 and 2π.
let (y, x) = (2. * PI * rng.gen::<f64>()).sin_cos();
Point(r * x, r * y)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn all_in_annulus() {
let r1 = 12.;
let r2 = 58.;
assert!(
AnnulusDist::new(r1, r2)
.sample_iter(&mut ::rand::thread_rng())
.take(1000)
.all(|p| {
let d = p.dist(Point(0., 0.));
r1 <= d && d <= r2
})
);
}
}
| AnnulusDist | identifier_name |
annulus_distribution.rs | //! Implementation of a uniform distribuition of points on a two-dimensional
//! annulus.
use rand::distributions::Distribution;
use rand::Rng;
use std::f64::consts::PI;
pub use Point;
/// The uniform distribution of 2D points on an annulus `{x: r_1 <= |x| <= r_2}`.
pub struct AnnulusDist {
r1_sq: f64,
r2_sq: f64,
}
impl AnnulusDist {
/// Construct a new `AnnulusDist` with the given inner and outer radius
/// `r1`, `r2`. Panics if not `0 < r1 < r2`.
pub fn new(r1: f64, r2: f64) -> AnnulusDist |
}
impl Distribution<Point> for AnnulusDist {
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Point {
// For points to be uniformly distributed in the annulus, the area of the disk with radius
// equal to the distance of the point from the origin is distributed uniformly between r₁²
// and r₂².
let r = (self.r1_sq + rng.gen::<f64>() * (self.r2_sq - self.r1_sq)).sqrt();
// The angle is uniform between 0 and 2π.
let (y, x) = (2. * PI * rng.gen::<f64>()).sin_cos();
Point(r * x, r * y)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn all_in_annulus() {
let r1 = 12.;
let r2 = 58.;
assert!(
AnnulusDist::new(r1, r2)
.sample_iter(&mut ::rand::thread_rng())
.take(1000)
.all(|p| {
let d = p.dist(Point(0., 0.));
r1 <= d && d <= r2
})
);
}
}
| {
assert!(0. < r1, "AnnulusDist::new called with `r1 <= 0`");
assert!(r1 < r2, "AnnulusDist::new called with `r2 <= r1`");
AnnulusDist {
r1_sq: r1 * r1,
r2_sq: r2 * r2,
}
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.