| file_name (large_string, lengths 4-69) | prefix (large_string, lengths 0-26.7k) | suffix (large_string, lengths 0-24.8k) | middle (large_string, lengths 0-2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
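Each row below is a fill-in-the-middle (FIM) sample: a Rust source file split into a `prefix`, a held-out `middle`, and a `suffix`, labelled with one of four `fim_type` classes (`conditional_block`, `random_line_split`, `identifier_body`, `identifier_name`). As a minimal sketch of how a row fits together, assuming only the column layout shown above (the struct and field names here are illustrative, not part of any published loader):

```rust
/// Illustrative record type mirroring the five columns in the table above.
struct FimExample {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    fim_type: String, // e.g. "conditional_block" or "identifier_name"
}

impl FimExample {
    /// Concatenating the three spans should reproduce the original file,
    /// since `middle` is simply the text cut out between `prefix` and `suffix`.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```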
distinct.rs
|
use std::collections::{HashMap, HashSet};
use std::collections::hash_state::DefaultState;
use std::hash::{hash, Hash, SipHasher};
use std::default::Default;
use communication::Data;
use communication::pact::Exchange;
use construction::{Stream, GraphBuilder};
use construction::operators::unary::UnaryNotifyExt;
use serialization::Serializable;
pub trait DistinctExtensionTrait {
fn distinct(&self) -> Self;
fn distinct_batch(&self) -> Self;
}
impl<G: GraphBuilder, D: Data+Hash+Eq+Serializable> DistinctExtensionTrait for Stream<G, D>
where G::Timestamp: Hash {
fn distinct(&self) -> Stream<G, D> {
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "Distinct", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
let mut session = output.session(&time);
for datum in data.drain(..) {
if set.insert(datum.clone())
|
}
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
elements.remove(&time);
}
})
}
fn distinct_batch(&self) -> Stream<G, D> {
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "DistinctBlock", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
for datum in data.drain(..) { set.insert(datum); }
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
if let Some(mut data) = elements.remove(&time) {
output.give_at(&time, data.drain());
}
}
})
}
}
|
{
session.give(datum);
}
|
conditional_block
|
distinct.rs
|
use std::collections::{HashMap, HashSet};
use std::collections::hash_state::DefaultState;
|
use communication::pact::Exchange;
use construction::{Stream, GraphBuilder};
use construction::operators::unary::UnaryNotifyExt;
use serialization::Serializable;
pub trait DistinctExtensionTrait {
fn distinct(&self) -> Self;
fn distinct_batch(&self) -> Self;
}
impl<G: GraphBuilder, D: Data+Hash+Eq+Serializable> DistinctExtensionTrait for Stream<G, D>
where G::Timestamp: Hash {
fn distinct(&self) -> Stream<G, D> {
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "Distinct", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
let mut session = output.session(&time);
for datum in data.drain(..) {
if set.insert(datum.clone()) {
session.give(datum);
}
}
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
elements.remove(&time);
}
})
}
fn distinct_batch(&self) -> Stream<G, D> {
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "DistinctBlock", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
for datum in data.drain(..) { set.insert(datum); }
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
if let Some(mut data) = elements.remove(&time) {
output.give_at(&time, data.drain());
}
}
})
}
}
|
use std::hash::{hash, Hash, SipHasher};
use std::default::Default;
use communication::Data;
|
random_line_split
|
distinct.rs
|
use std::collections::{HashMap, HashSet};
use std::collections::hash_state::DefaultState;
use std::hash::{hash, Hash, SipHasher};
use std::default::Default;
use communication::Data;
use communication::pact::Exchange;
use construction::{Stream, GraphBuilder};
use construction::operators::unary::UnaryNotifyExt;
use serialization::Serializable;
pub trait DistinctExtensionTrait {
fn distinct(&self) -> Self;
fn distinct_batch(&self) -> Self;
}
impl<G: GraphBuilder, D: Data+Hash+Eq+Serializable> DistinctExtensionTrait for Stream<G, D>
where G::Timestamp: Hash {
fn distinct(&self) -> Stream<G, D>
|
}
fn distinct_batch(&self) -> Stream<G, D> {
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "DistinctBlock", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
for datum in data.drain(..) { set.insert(datum); }
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
if let Some(mut data) = elements.remove(&time) {
output.give_at(&time, data.drain());
}
}
})
}
}
|
{
let mut elements: HashMap<_, HashSet<_, DefaultState<SipHasher>>> = HashMap::new();
let exch = Exchange::new(|x| hash::<_,SipHasher>(&x));
self.unary_notify(exch, "Distinct", vec![], move |input, output, notificator| {
while let Some((time, data)) = input.pull() {
let set = elements.entry(time).or_insert(Default::default());
let mut session = output.session(&time);
for datum in data.drain(..) {
if set.insert(datum.clone()) {
session.give(datum);
}
}
notificator.notify_at(&time);
}
while let Some((time, _count)) = notificator.next() {
elements.remove(&time);
}
})
|
identifier_body
|
class.rs
|
//! Atom-class implementation.
use std::ops::{BitXor, Sub, Range, RangeInclusive};
use std::iter::FromIterator;
use std::fmt::{self, Debug};
use itertools::Itertools;
use super::{Anchor, Atom, ByteOrChar, Element, Repetition, Sequence, Union};
use super::{Distance, Step};
use util::set::{self, Contains};
// ----------------------------------------------------------------
// Class
/// Any member of a class of atoms
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
pub enum ClassMember<T: Atom> {
/// A single atom
Atom(T),
/// A range of atoms
Range(T, T)
}
impl<T: Atom> ClassMember<T> {
/// Map the class-member's atoms to a different type
pub fn map_atoms<U, F>(self, f: F) -> ClassMember<U>
where F: Fn(T) -> U,
U: Atom
{
match self {
ClassMember::Atom(a) => ClassMember::Atom(f(a)),
ClassMember::Range(a, b) => ClassMember::Range(f(a), f(b))
}
}
/// Get the number of individual atoms that this class member represents.
pub fn len(&self) -> usize
where T: Distance
{
match self {
&ClassMember::Atom(_) => 1,
&ClassMember::Range(first, last) => last.distance(&first) + 1,
}
}
/// Fetch an iterator over each discrete value contained by this member.
#[inline]
pub fn iter(&self) -> impl Iterator<Item=T>
where T: Step {
match self {
&ClassMember::Atom(a) => AtomIter(a...a),
&ClassMember::Range(a, b) => AtomIter(a...b),
}
}
}
impl<T: Atom> set::Contains<T> for ClassMember<T> {
/// Check if the member is or contains a particular atom.
fn contains(&self, x: T) -> bool {
match self {
&ClassMember::Atom(a) => x == a,
&ClassMember::Range(a, b) => (a...b).contains(x),
}
}
}
/// Iterator over the atoms specified by a class member.
struct AtomIter<T: Atom>(RangeInclusive<T>);
impl<T: Atom> Iterator for AtomIter<T>
where T: Step
{
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
RangeInclusive::NonEmpty{start, end} => {
let next_start = start.increment();
if next_start > end {
self.0 = RangeInclusive::Empty{at: end};
} else {
self.0 = RangeInclusive::NonEmpty{start: next_start, end: end};
}
Some(start) },
RangeInclusive::Empty{..} => None
}
}
}
/*impl<T: Atom> Debug for ClassMember<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ClassMember::Atom(a) => write!(f, "{:?}", a),
ClassMember::Range(first, last) => write!(f, "{:?}-{:?}", first, last)
}
}
}*/
impl<T: Atom> From<T> for ClassMember<T> {
fn from(a: T) -> Self { ClassMember::Atom(a) }
}
}
}
}
}
}
}
impl<T: Atom> From<Range<T>> for ClassMember<T>
where T: Sub<usize,Output=T> + Sub<T,Output=usize>
{
fn from(r: Range<T>) -> Self {
if r.end - r.start > 1 { ClassMember::Range(r.start, r.end - 1usize) }
else { ClassMember::Atom(r.start) }
}
}
// ----------------------------------------------------------------
/// A set of atoms and/or ranges of atoms.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Class<T: Atom> {
members: Vec<ClassMember<T>>
}
// FIXME: [optimize] All operations on a Class<T>, including observer and
// predicate methods, currently require at least O(N) time. We could probably
// improve this by changing the storage representation.
impl<T: Atom> Class<T> {
|
}
/// Create a new Class instance from an iterator of ClassMember instances.
pub fn from_members<I>(m: I) -> Self
where I: IntoIterator<Item=ClassMember<T>>
{
Class{members: m.into_iter().collect()}
}
/// Map the atoms in the class to a different atom type.
pub fn map_atoms<U, F>(self, f: F) -> Class<U>
where F: Fn(T) -> U,
U: Atom
{
Class::new(self.members.into_iter().map(|m| m.map_atoms(&f)))
}
/// Check whether the class would match a particular atom.
pub fn matches(&self, x: T) -> bool {
self.members.iter().any(|&range| range.contains(x))
}
/// Get the number of members (atoms) in the class.
///
/// Ranges of atoms count as the number of atoms in each range.
pub fn len(&self) -> usize where T: Distance {
self.members.iter().map(|m| m.len()).sum()
}
/// Check if the class contains zero members.
#[inline]
pub fn is_empty(&self) -> bool {
self.members.is_empty()
}
/// Fetch an iterator over the members specified in the class.
pub fn iter_members<'a>(&'a self) -> impl Iterator<Item=&'a ClassMember<T>> {
self.members.iter()
}
/// Fetch an iterator over the atoms specified in the class.
pub fn iter<'a>(&'a self) -> impl Iterator<Item=T> + 'a where T: Step {
self.members.iter().flat_map(|m| m.iter())
}
}
impl<T: Atom> Debug for Class<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[{}]", self.members.iter().map(|m| format!("{:?}", m)).join(","))
}
}
impl<'a, T: 'a + Atom> set::Contains<&'a ClassMember<T>> for Class<T> {
fn contains(&self, cm: &'a ClassMember<T>) -> bool {
self.iter_members().any(|m| cm == m)
}
}
impl<T: Atom> set::Contains<T> for Class<T> {
/// Check if any member of the class matches the given atom.
fn contains(&self, x: T) -> bool {
self.matches(x)
}
}
impl<T: Atom + Step> set::IsSubsetOf<T> for Class<T> {
/// A class *can* be a subset of an atom -- as long as that atom is the
/// only member of the class!
#[inline]
fn is_subset_of(&self, atom: &T) -> bool {
self.contains(*atom) && self.len() == 1
}
}
impl<T: Atom> set::IsSubsetOf<Anchor<T>> for Class<T> {
/// A class is never a subset of an anchor.
#[inline(always)]
fn is_subset_of(&self, _: &Anchor<T>) -> bool {
false
}
}
impl<T: Atom> super::AtomicLen for Class<T> {
fn atomic_len(&self) -> super::SizeBound {
super::SizeBound::Exact(1)
}
}
// FIXME [optimize] I suspect that we need some sort of specialized data
// structure or clever algorithm in order to perform this test in a time better
// than O(N·M).
impl<T: Atom + Step> set::IsSubsetOf<Class<T>> for Class<T> {
/// A class `A` is a subset of another class `B` if all atoms in `A` are
/// also in `B`.
fn is_subset_of(&self, other: &Self) -> bool {
self.iter().all(|m| other.contains(m))
}
}
impl<T: Atom> set::IsSubsetOf<Sequence<T>> for Class<T> {
/// Any single-atom pattern like Class cannot match the same set of inputs
/// as a Sequence, because a Sequence is guaranteed to contain at least
/// two elements.
#[inline]
fn is_subset_of(&self, _: &Sequence<T>) -> bool {
false
}
}
impl<T: Atom + Step> set::IsSubsetOf<Union<T>> for Class<T> {
/// A class is a subset of a union if all members of the class are subsets
/// of the union.
fn is_subset_of(&self, union: &Union<T>) -> bool {
self.iter().all(|a| a.is_subset_of(union))
}
}
impl<T: Atom + Step> set::IsSubsetOf<Repetition<T>> for Class<T> {
fn is_subset_of(&self, rep: &Repetition<T>) -> bool {
self.iter().all(|a| a.is_subset_of(rep.element())) && rep.count().contains(1)
}
}
impl<T: Atom> set::IsSubsetOf<Element<T>> for Class<T> {
/// A class is always a subset of a wildcard, and never a subset of an
/// anchor. See the other `IsSubsetOf` implementations for `Class` for
/// explanations of how this trait is implemented for the other variants of
/// `Element`.
fn is_subset_of(&self, other: &Element<T>) -> bool {
match other {
// Anything we can do, Wildcard can do betteeeeer...
&Element::Wildcard => true,
&Element::Atom(a) => self.is_subset_of(&a),
&Element::Class(ref d) => self.is_subset_of(d),
&Element::Tagged{ref element,..} => self.is_subset_of(&**element),
&Element::Sequence(ref seq) => self.is_subset_of(seq),
&Element::Union(ref union) => self.is_subset_of(union),
&Element::Repeat(ref rep) => self.is_subset_of(rep),
&Element::Anchor(_) => false,
}
}
}
impl<T: Atom, U> FromIterator<U> for Class<T>
where ClassMember<T>: From<U> {
fn from_iter<I>(iter: I) -> Self
where I: IntoIterator<Item=U> {
Class{members: Vec::from_iter(iter.into_iter().map(|x| x.into()))}
}
}
|
/// Create a new Class instance with members taken from the given iterator.
pub fn new<I,U>(items: I) -> Self
where I: IntoIterator<Item=U>,
ClassMember<T>: From<U> {
Class{members: Vec::from_iter(items.into_iter().map(|x| x.into()))}
|
random_line_split
|
class.rs
|
//! Atom-class implementation.
use std::ops::{BitXor, Sub, Range, RangeInclusive};
use std::iter::FromIterator;
use std::fmt::{self, Debug};
use itertools::Itertools;
use super::{Anchor, Atom, ByteOrChar, Element, Repetition, Sequence, Union};
use super::{Distance, Step};
use util::set::{self, Contains};
// ----------------------------------------------------------------
// Class
/// Any member of a class of atoms
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
pub enum ClassMember<T: Atom> {
/// A single atom
Atom(T),
/// A range of atoms
Range(T, T)
}
impl<T: Atom> ClassMember<T> {
/// Map the class-member's atoms to a different type
pub fn map_atoms<U, F>(self, f: F) -> ClassMember<U>
where F: Fn(T) -> U,
U: Atom
{
match self {
ClassMember::Atom(a) => ClassMember::Atom(f(a)),
ClassMember::Range(a, b) => ClassMember::Range(f(a), f(b))
}
}
/// Get the number of individual atoms that this class member represents.
pub fn len(&self) -> usize
where T: Distance
{
match self {
&ClassMember::Atom(_) => 1,
&ClassMember::Range(first, last) => last.distance(&first) + 1,
}
}
/// Fetch an iterator over each discrete value contained by this member.
#[inline]
pub fn iter(&self) -> impl Iterator<Item=T>
where T: Step {
match self {
&ClassMember::Atom(a) => AtomIter(a...a),
&ClassMember::Range(a, b) => AtomIter(a...b),
}
}
}
impl<T: Atom> set::Contains<T> for ClassMember<T> {
/// Check if the member is or contains a particular atom.
fn contains(&self, x: T) -> bool {
match self {
&ClassMember::Atom(a) => x == a,
&ClassMember::Range(a, b) => (a...b).contains(x),
}
}
}
/// Iterator over the atoms specified by a class member.
struct AtomIter<T: Atom>(RangeInclusive<T>);
impl<T: Atom> Iterator for AtomIter<T>
where T: Step
{
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
RangeInclusive::NonEmpty{start, end} => {
let next_start = start.increment();
if next_start > end {
self.0 = RangeInclusive::Empty{at: end};
} else {
self.0 = RangeInclusive::NonEmpty{start: next_start, end: end};
}
Some(start) },
RangeInclusive::Empty{..} => None
}
}
}
/*impl<T: Atom> Debug for ClassMember<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ClassMember::Atom(a) => write!(f, "{:?}", a),
ClassMember::Range(first, last) => write!(f, "{:?}-{:?}", first, last)
}
}
}*/
impl<T: Atom> From<T> for ClassMember<T> {
fn from(a: T) -> Self { ClassMember::Atom(a) }
}
}
}
}
}
}
}
impl<T: Atom> From<Range<T>> for ClassMember<T>
where T: Sub<usize,Output=T> + Sub<T,Output=usize>
{
fn from(r: Range<T>) -> Self {
if r.end - r.start > 1 { ClassMember::Range(r.start, r.end - 1usize) }
else { ClassMember::Atom(r.start) }
}
}
// ----------------------------------------------------------------
/// A set of atoms and/or ranges of atoms.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Class<T: Atom> {
members: Vec<ClassMember<T>>
}
// FIXME: [optimize] All operations on a Class<T>, including observer and
// predicate methods, currently require at least O(N) time. We could probably
// improve this by changing the storage representation.
impl<T: Atom> Class<T> {
/// Create a new Class instance with members taken from the given iterator.
pub fn new<I,U>(items: I) -> Self
where I: IntoIterator<Item=U>,
ClassMember<T>: From<U> {
Class{members: Vec::from_iter(items.into_iter().map(|x| x.into()))}
}
/// Create a new Class instance from an iterator of ClassMember instances.
pub fn from_members<I>(m: I) -> Self
where I: IntoIterator<Item=ClassMember<T>>
{
Class{members: m.into_iter().collect()}
}
/// Map the atoms in the class to a different atom type.
pub fn map_atoms<U, F>(self, f: F) -> Class<U>
where F: Fn(T) -> U,
U: Atom
{
Class::new(self.members.into_iter().map(|m| m.map_atoms(&f)))
}
/// Check whether the class would match a particular atom.
pub fn matches(&self, x: T) -> bool {
self.members.iter().any(|&range| range.contains(x))
}
/// Get the number of members (atoms) in the class.
///
/// Ranges of atoms count as the number of atoms in each range.
pub fn len(&self) -> usize where T: Distance {
self.members.iter().map(|m| m.len()).sum()
}
/// Check if the class contains zero members.
#[inline]
pub fn is_empty(&self) -> bool {
self.members.is_empty()
}
/// Fetch an iterator over the members specified in the class.
pub fn iter_members<'a>(&'a self) -> impl Iterator<Item=&'a ClassMember<T>>
|
/// Fetch an iterator over the atoms specified in the class.
pub fn iter<'a>(&'a self) -> impl Iterator<Item=T> + 'a where T: Step {
self.members.iter().flat_map(|m| m.iter())
}
}
impl<T: Atom> Debug for Class<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[{}]", self.members.iter().map(|m| format!("{:?}", m)).join(","))
}
}
impl<'a, T: 'a + Atom> set::Contains<&'a ClassMember<T>> for Class<T> {
fn contains(&self, cm: &'a ClassMember<T>) -> bool {
self.iter_members().any(|m| cm == m)
}
}
impl<T: Atom> set::Contains<T> for Class<T> {
/// Check if any member of the class matches the given atom.
fn contains(&self, x: T) -> bool {
self.matches(x)
}
}
impl<T: Atom + Step> set::IsSubsetOf<T> for Class<T> {
/// A class *can* be a subset of an atom -- as long as that atom is the
/// only member of the class!
#[inline]
fn is_subset_of(&self, atom: &T) -> bool {
self.contains(*atom) && self.len() == 1
}
}
impl<T: Atom> set::IsSubsetOf<Anchor<T>> for Class<T> {
/// A class is never a subset of an anchor.
#[inline(always)]
fn is_subset_of(&self, _: &Anchor<T>) -> bool {
false
}
}
impl<T: Atom> super::AtomicLen for Class<T> {
fn atomic_len(&self) -> super::SizeBound {
super::SizeBound::Exact(1)
}
}
// FIXME [optimize] I suspect that we need some sort of specialized data
// structure or clever algorithm in order to perform this test in a time better
// than O(N·M).
impl<T: Atom + Step> set::IsSubsetOf<Class<T>> for Class<T> {
/// A class `A` is a subset of another class `B` if all atoms in `A` are
/// also in `B`.
fn is_subset_of(&self, other: &Self) -> bool {
self.iter().all(|m| other.contains(m))
}
}
impl<T: Atom> set::IsSubsetOf<Sequence<T>> for Class<T> {
/// Any single-atom pattern like Class cannot match the same set of inputs
/// as a Sequence, because a Sequence is guaranteed to contain at least
/// two elements.
#[inline]
fn is_subset_of(&self, _: &Sequence<T>) -> bool {
false
}
}
impl<T: Atom + Step> set::IsSubsetOf<Union<T>> for Class<T> {
/// A class is a subset of a union if all members of the class are subsets
/// of the union.
fn is_subset_of(&self, union: &Union<T>) -> bool {
self.iter().all(|a| a.is_subset_of(union))
}
}
impl<T: Atom + Step> set::IsSubsetOf<Repetition<T>> for Class<T> {
fn is_subset_of(&self, rep: &Repetition<T>) -> bool {
self.iter().all(|a| a.is_subset_of(rep.element())) && rep.count().contains(1)
}
}
impl<T: Atom> set::IsSubsetOf<Element<T>> for Class<T> {
/// A class is always a subset of a wildcard, and never a subset of an
/// anchor. See the other `IsSubsetOf` implementations for `Class` for
/// explanations of how this trait is implemented for the other variants of
/// `Element`.
fn is_subset_of(&self, other: &Element<T>) -> bool {
match other {
// Anything we can do, Wildcard can do betteeeeer...
&Element::Wildcard => true,
&Element::Atom(a) => self.is_subset_of(&a),
&Element::Class(ref d) => self.is_subset_of(d),
&Element::Tagged{ref element,..} => self.is_subset_of(&**element),
&Element::Sequence(ref seq) => self.is_subset_of(seq),
&Element::Union(ref union) => self.is_subset_of(union),
&Element::Repeat(ref rep) => self.is_subset_of(rep),
&Element::Anchor(_) => false,
}
}
}
impl<T: Atom, U> FromIterator<U> for Class<T>
where ClassMember<T>: From<U> {
fn from_iter<I>(iter: I) -> Self
where I: IntoIterator<Item=U> {
Class{members: Vec::from_iter(iter.into_iter().map(|x| x.into()))}
}
}
|
{
self.members.iter()
}
|
identifier_body
|
class.rs
|
//! Atom-class implementation.
use std::ops::{BitXor, Sub, Range, RangeInclusive};
use std::iter::FromIterator;
use std::fmt::{self, Debug};
use itertools::Itertools;
use super::{Anchor, Atom, ByteOrChar, Element, Repetition, Sequence, Union};
use super::{Distance, Step};
use util::set::{self, Contains};
// ----------------------------------------------------------------
// Class
/// Any member of a class of atoms
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
pub enum ClassMember<T: Atom> {
/// A single atom
Atom(T),
/// A range of atoms
Range(T, T)
}
impl<T: Atom> ClassMember<T> {
/// Map the class-member's atoms to a different type
pub fn map_atoms<U, F>(self, f: F) -> ClassMember<U>
where F: Fn(T) -> U,
U: Atom
{
match self {
ClassMember::Atom(a) => ClassMember::Atom(f(a)),
ClassMember::Range(a, b) => ClassMember::Range(f(a), f(b))
}
}
/// Get the number of individual atoms that this class member represents.
pub fn len(&self) -> usize
where T: Distance
{
match self {
&ClassMember::Atom(_) => 1,
&ClassMember::Range(first, last) => last.distance(&first) + 1,
}
}
/// Fetch an iterator over each discrete value contained by this member.
#[inline]
pub fn iter(&self) -> impl Iterator<Item=T>
where T: Step {
match self {
&ClassMember::Atom(a) => AtomIter(a...a),
&ClassMember::Range(a, b) => AtomIter(a...b),
}
}
}
impl<T: Atom> set::Contains<T> for ClassMember<T> {
/// Check if the member is or contains a particular atom.
fn contains(&self, x: T) -> bool {
match self {
&ClassMember::Atom(a) => x == a,
&ClassMember::Range(a, b) => (a...b).contains(x),
}
}
}
/// Iterator over the atoms specified by a class member.
struct AtomIter<T: Atom>(RangeInclusive<T>);
impl<T: Atom> Iterator for AtomIter<T>
where T: Step
{
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
RangeInclusive::NonEmpty{start, end} => {
let next_start = start.increment();
if next_start > end {
self.0 = RangeInclusive::Empty{at: end};
} else {
self.0 = RangeInclusive::NonEmpty{start: next_start, end: end};
}
Some(start) },
RangeInclusive::Empty{..} => None
}
}
}
/*impl<T: Atom> Debug for ClassMember<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ClassMember::Atom(a) => write!(f, "{:?}", a),
ClassMember::Range(first, last) => write!(f, "{:?}-{:?}", first, last)
}
}
}*/
impl<T: Atom> From<T> for ClassMember<T> {
fn from(a: T) -> Self { ClassMember::Atom(a) }
}
}
}
}
}
}
}
impl<T: Atom> From<Range<T>> for ClassMember<T>
where T: Sub<usize,Output=T> + Sub<T,Output=usize>
{
fn from(r: Range<T>) -> Self {
if r.end - r.start > 1 { ClassMember::Range(r.start, r.end - 1usize) }
else { ClassMember::Atom(r.start) }
}
}
// ----------------------------------------------------------------
/// A set of atoms and/or ranges of atoms.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Class<T: Atom> {
members: Vec<ClassMember<T>>
}
// FIXME: [optimize] All operations on a Class<T>, including observer and
// predicate methods, currently require at least O(N) time. We could probably
// improve this by changing the storage representation.
impl<T: Atom> Class<T> {
/// Create a new Class instance with members taken from the given iterator.
pub fn new<I,U>(items: I) -> Self
where I: IntoIterator<Item=U>,
ClassMember<T>: From<U> {
Class{members: Vec::from_iter(items.into_iter().map(|x| x.into()))}
}
/// Create a new Class instance from an iterator of ClassMember instances.
pub fn from_members<I>(m: I) -> Self
where I: IntoIterator<Item=ClassMember<T>>
{
Class{members: m.into_iter().collect()}
}
/// Map the atoms in the class to a different atom type.
pub fn map_atoms<U, F>(self, f: F) -> Class<U>
where F: Fn(T) -> U,
U: Atom
{
Class::new(self.members.into_iter().map(|m| m.map_atoms(&f)))
}
/// Check whether the class would match a particular atom.
pub fn matches(&self, x: T) -> bool {
self.members.iter().any(|&range| range.contains(x))
}
/// Get the number of members (atoms) in the class.
///
/// Ranges of atoms count as the number of atoms in each range.
pub fn len(&self) -> usize where T: Distance {
self.members.iter().map(|m| m.len()).sum()
}
/// Check if the class contains zero members.
#[inline]
pub fn is_empty(&self) -> bool {
self.members.is_empty()
}
/// Fetch an iterator over the members specified in the class.
pub fn iter_members<'a>(&'a self) -> impl Iterator<Item=&'a ClassMember<T>> {
self.members.iter()
}
/// Fetch an iterator over the atoms specified in the class.
pub fn iter<'a>(&'a self) -> impl Iterator<Item=T> + 'a where T: Step {
self.members.iter().flat_map(|m| m.iter())
}
}
impl<T: Atom> Debug for Class<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[{}]", self.members.iter().map(|m| format!("{:?}", m)).join(","))
}
}
impl<'a, T: 'a + Atom> set::Contains<&'a ClassMember<T>> for Class<T> {
fn
|
(&self, cm: &'a ClassMember<T>) -> bool {
self.iter_members().any(|m| cm == m)
}
}
impl<T: Atom> set::Contains<T> for Class<T> {
/// Check if any member of the class matches the given atom.
fn contains(&self, x: T) -> bool {
self.matches(x)
}
}
impl<T: Atom + Step> set::IsSubsetOf<T> for Class<T> {
/// A class *can* be a subset of an atom -- as long as that atom is the
/// only member of the class!
#[inline]
fn is_subset_of(&self, atom: &T) -> bool {
self.contains(*atom) && self.len() == 1
}
}
impl<T: Atom> set::IsSubsetOf<Anchor<T>> for Class<T> {
/// A class is never a subset of an anchor.
#[inline(always)]
fn is_subset_of(&self, _: &Anchor<T>) -> bool {
false
}
}
impl<T: Atom> super::AtomicLen for Class<T> {
fn atomic_len(&self) -> super::SizeBound {
super::SizeBound::Exact(1)
}
}
// FIXME [optimize] I suspect that we need some sort of specialized data
// structure or clever algorithm in order to perform this test in a time better
// than O(N·M).
impl<T: Atom + Step> set::IsSubsetOf<Class<T>> for Class<T> {
/// A class `A` is a subset of another class `B` if all atoms in `A` are
/// also in `B`.
fn is_subset_of(&self, other: &Self) -> bool {
self.iter().all(|m| other.contains(m))
}
}
impl<T: Atom> set::IsSubsetOf<Sequence<T>> for Class<T> {
/// Any single-atom pattern like Class cannot match the same set of inputs
/// as a Sequence, because a Sequence is guaranteed to contain at least
/// two elements.
#[inline]
fn is_subset_of(&self, _: &Sequence<T>) -> bool {
false
}
}
impl<T: Atom + Step> set::IsSubsetOf<Union<T>> for Class<T> {
/// A class is a subset of a union if all members of the class are subsets
/// of the union.
fn is_subset_of(&self, union: &Union<T>) -> bool {
self.iter().all(|a| a.is_subset_of(union))
}
}
impl<T: Atom + Step> set::IsSubsetOf<Repetition<T>> for Class<T> {
fn is_subset_of(&self, rep: &Repetition<T>) -> bool {
self.iter().all(|a| a.is_subset_of(rep.element())) && rep.count().contains(1)
}
}
impl<T: Atom> set::IsSubsetOf<Element<T>> for Class<T> {
/// A class is always a subset of a wildcard, and never a subset of an
/// anchor. See the other `IsSubsetOf` implementations for `Class` for
/// explanations of how this trait is implemented for the other variants of
/// `Element`.
fn is_subset_of(&self, other: &Element<T>) -> bool {
match other {
// Anything we can do, Wildcard can do betteeeeer...
&Element::Wildcard => true,
&Element::Atom(a) => self.is_subset_of(&a),
&Element::Class(ref d) => self.is_subset_of(d),
&Element::Tagged{ref element,..} => self.is_subset_of(&**element),
&Element::Sequence(ref seq) => self.is_subset_of(seq),
&Element::Union(ref union) => self.is_subset_of(union),
&Element::Repeat(ref rep) => self.is_subset_of(rep),
&Element::Anchor(_) => false,
}
}
}
impl<T: Atom, U> FromIterator<U> for Class<T>
where ClassMember<T>: From<U> {
fn from_iter<I>(iter: I) -> Self
where I: IntoIterator<Item=U> {
Class{members: Vec::from_iter(iter.into_iter().map(|x| x.into()))}
}
}
|
contains
|
identifier_name
|
xrspace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrjointspace::XRJointSpace;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrsession::{cast_transform, ApiPose, XRSession};
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
use webxr_api::{BaseSpace, Frame, Space};
#[dom_struct]
pub struct XRSpace {
eventtarget: EventTarget,
session: Dom<XRSession>,
input_source: MutNullableDom<XRInputSource>,
/// If we're an input space, are we an aim space or a grip space?
is_grip_space: bool,
}
impl XRSpace {
pub fn new_inherited(session: &XRSession) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: Default::default(),
is_grip_space: false,
}
}
fn new_inputspace_inner(
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: MutNullableDom::new(Some(input)),
is_grip_space,
}
}
pub fn new_inputspace(
global: &GlobalScope,
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> DomRoot<XRSpace> {
reflect_dom_object(
Box::new(XRSpace::new_inputspace_inner(session, input, is_grip_space)),
global,
)
}
pub fn space(&self) -> Space {
if let Some(rs) = self.downcast::<XRReferenceSpace>() {
rs.space()
} else if let Some(j) = self.downcast::<XRJointSpace>() {
j.space()
} else if let Some(source) = self.input_source.get() {
let base = if self.is_grip_space {
BaseSpace::Grip(source.id())
} else {
BaseSpace::TargetRay(source.id())
};
Space {
base,
offset: RigidTransform3D::identity(),
}
} else {
panic!("invalid space found")
}
}
}
impl XRSpace {
/// Gets pose represented by this space
///
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &Frame) -> Option<ApiPose> {
if let Some(reference) = self.downcast::<XRReferenceSpace>() {
reference.get_pose(base_pose)
} else if let Some(joint) = self.downcast::<XRJointSpace>() {
joint.get_pose(base_pose)
} else if let Some(source) = self.input_source.get() {
// XXXManishearth we should be able to request frame information
// for inputs when necessary instead of always loading it
//
// Also, the below code is quadratic, so this API may need an overhaul anyway
let id = source.id();
// XXXManishearth once we have dynamic inputs we'll need to handle this better
let frame = base_pose
.inputs
.iter()
.find(|i| i.id == id)
.expect("no input found");
if self.is_grip_space {
frame.grip_origin.map(cast_transform)
} else
|
} else {
unreachable!()
}
}
pub fn session(&self) -> &XRSession {
&self.session
}
}
|
{
frame.target_ray_origin.map(cast_transform)
}
|
conditional_block
|
xrspace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrjointspace::XRJointSpace;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrsession::{cast_transform, ApiPose, XRSession};
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
use webxr_api::{BaseSpace, Frame, Space};
#[dom_struct]
pub struct XRSpace {
eventtarget: EventTarget,
session: Dom<XRSession>,
input_source: MutNullableDom<XRInputSource>,
/// If we're an input space, are we an aim space or a grip space?
is_grip_space: bool,
}
impl XRSpace {
pub fn
|
(session: &XRSession) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: Default::default(),
is_grip_space: false,
}
}
fn new_inputspace_inner(
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: MutNullableDom::new(Some(input)),
is_grip_space,
}
}
pub fn new_inputspace(
global: &GlobalScope,
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> DomRoot<XRSpace> {
reflect_dom_object(
Box::new(XRSpace::new_inputspace_inner(session, input, is_grip_space)),
global,
)
}
pub fn space(&self) -> Space {
if let Some(rs) = self.downcast::<XRReferenceSpace>() {
rs.space()
} else if let Some(j) = self.downcast::<XRJointSpace>() {
j.space()
} else if let Some(source) = self.input_source.get() {
let base = if self.is_grip_space {
BaseSpace::Grip(source.id())
} else {
BaseSpace::TargetRay(source.id())
};
Space {
base,
offset: RigidTransform3D::identity(),
}
} else {
panic!("invalid space found")
}
}
}
impl XRSpace {
/// Gets pose represented by this space
///
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &Frame) -> Option<ApiPose> {
if let Some(reference) = self.downcast::<XRReferenceSpace>() {
reference.get_pose(base_pose)
} else if let Some(joint) = self.downcast::<XRJointSpace>() {
joint.get_pose(base_pose)
} else if let Some(source) = self.input_source.get() {
// XXXManishearth we should be able to request frame information
// for inputs when necessary instead of always loading it
//
// Also, the below code is quadratic, so this API may need an overhaul anyway
let id = source.id();
// XXXManishearth once we have dynamic inputs we'll need to handle this better
let frame = base_pose
.inputs
.iter()
.find(|i| i.id == id)
.expect("no input found");
if self.is_grip_space {
frame.grip_origin.map(cast_transform)
} else {
frame.target_ray_origin.map(cast_transform)
}
} else {
unreachable!()
}
}
pub fn session(&self) -> &XRSession {
&self.session
}
}
|
new_inherited
|
identifier_name
|
xrspace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrjointspace::XRJointSpace;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrsession::{cast_transform, ApiPose, XRSession};
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
use webxr_api::{BaseSpace, Frame, Space};
#[dom_struct]
pub struct XRSpace {
eventtarget: EventTarget,
session: Dom<XRSession>,
input_source: MutNullableDom<XRInputSource>,
/// If we're an input space, are we an aim space or a grip space?
is_grip_space: bool,
}
impl XRSpace {
pub fn new_inherited(session: &XRSession) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: Default::default(),
is_grip_space: false,
}
}
fn new_inputspace_inner(
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: MutNullableDom::new(Some(input)),
is_grip_space,
}
}
pub fn new_inputspace(
global: &GlobalScope,
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> DomRoot<XRSpace> {
reflect_dom_object(
Box::new(XRSpace::new_inputspace_inner(session, input, is_grip_space)),
global,
)
}
pub fn space(&self) -> Space {
if let Some(rs) = self.downcast::<XRReferenceSpace>() {
rs.space()
} else if let Some(j) = self.downcast::<XRJointSpace>() {
j.space()
|
BaseSpace::TargetRay(source.id())
};
Space {
base,
offset: RigidTransform3D::identity(),
}
} else {
panic!("invalid space found")
}
}
}
impl XRSpace {
/// Gets pose represented by this space
///
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &Frame) -> Option<ApiPose> {
if let Some(reference) = self.downcast::<XRReferenceSpace>() {
reference.get_pose(base_pose)
} else if let Some(joint) = self.downcast::<XRJointSpace>() {
joint.get_pose(base_pose)
} else if let Some(source) = self.input_source.get() {
// XXXManishearth we should be able to request frame information
// for inputs when necessary instead of always loading it
//
// Also, the below code is quadratic, so this API may need an overhaul anyway
let id = source.id();
// XXXManishearth once we have dynamic inputs we'll need to handle this better
let frame = base_pose
.inputs
.iter()
.find(|i| i.id == id)
.expect("no input found");
if self.is_grip_space {
frame.grip_origin.map(cast_transform)
} else {
frame.target_ray_origin.map(cast_transform)
}
} else {
unreachable!()
}
}
pub fn session(&self) -> &XRSession {
&self.session
}
}
|
} else if let Some(source) = self.input_source.get() {
let base = if self.is_grip_space {
BaseSpace::Grip(source.id())
} else {
|
random_line_split
|
xrspace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::xrinputsource::XRInputSource;
use crate::dom::xrjointspace::XRJointSpace;
use crate::dom::xrreferencespace::XRReferenceSpace;
use crate::dom::xrsession::{cast_transform, ApiPose, XRSession};
use dom_struct::dom_struct;
use euclid::RigidTransform3D;
use webxr_api::{BaseSpace, Frame, Space};
#[dom_struct]
pub struct XRSpace {
eventtarget: EventTarget,
session: Dom<XRSession>,
input_source: MutNullableDom<XRInputSource>,
/// If we're an input space, are we an aim space or a grip space?
is_grip_space: bool,
}
impl XRSpace {
pub fn new_inherited(session: &XRSession) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: Default::default(),
is_grip_space: false,
}
}
fn new_inputspace_inner(
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> XRSpace {
XRSpace {
eventtarget: EventTarget::new_inherited(),
session: Dom::from_ref(session),
input_source: MutNullableDom::new(Some(input)),
is_grip_space,
}
}
pub fn new_inputspace(
global: &GlobalScope,
session: &XRSession,
input: &XRInputSource,
is_grip_space: bool,
) -> DomRoot<XRSpace> {
reflect_dom_object(
Box::new(XRSpace::new_inputspace_inner(session, input, is_grip_space)),
global,
)
}
pub fn space(&self) -> Space
|
}
impl XRSpace {
/// Gets pose represented by this space
///
/// The reference origin used is common between all
/// get_pose calls for spaces from the same device, so this can be used to compare
/// with other spaces
pub fn get_pose(&self, base_pose: &Frame) -> Option<ApiPose> {
if let Some(reference) = self.downcast::<XRReferenceSpace>() {
reference.get_pose(base_pose)
} else if let Some(joint) = self.downcast::<XRJointSpace>() {
joint.get_pose(base_pose)
} else if let Some(source) = self.input_source.get() {
// XXXManishearth we should be able to request frame information
// for inputs when necessary instead of always loading it
//
// Also, the below code is quadratic, so this API may need an overhaul anyway
let id = source.id();
// XXXManishearth once we have dynamic inputs we'll need to handle this better
let frame = base_pose
.inputs
.iter()
.find(|i| i.id == id)
.expect("no input found");
if self.is_grip_space {
frame.grip_origin.map(cast_transform)
} else {
frame.target_ray_origin.map(cast_transform)
}
} else {
unreachable!()
}
}
pub fn session(&self) -> &XRSession {
&self.session
}
}
|
{
if let Some(rs) = self.downcast::<XRReferenceSpace>() {
rs.space()
} else if let Some(j) = self.downcast::<XRJointSpace>() {
j.space()
} else if let Some(source) = self.input_source.get() {
let base = if self.is_grip_space {
BaseSpace::Grip(source.id())
} else {
BaseSpace::TargetRay(source.id())
};
Space {
base,
offset: RigidTransform3D::identity(),
}
} else {
panic!("invalid space found")
}
}
|
identifier_body
|
roman-numerals-macro.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
// except according to those terms.
// aux-build:roman_numerals.rs
// ignore-stage1
#![feature(phase)]
#[phase(plugin)]
extern crate roman_numerals;
pub fn main() {
assert_eq!(rn!(MMXV), 2015);
assert_eq!(rn!(MCMXCIX), 1999);
assert_eq!(rn!(XXV), 25);
assert_eq!(rn!(MDCLXVI), 1666);
assert_eq!(rn!(MMMDCCCLXXXVIII), 3888);
assert_eq!(rn!(MMXIV), 2014);
}
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
roman-numerals-macro.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:roman_numerals.rs
// ignore-stage1
#![feature(phase)]
#[phase(plugin)]
extern crate roman_numerals;
pub fn
|
() {
assert_eq!(rn!(MMXV), 2015);
assert_eq!(rn!(MCMXCIX), 1999);
assert_eq!(rn!(XXV), 25);
assert_eq!(rn!(MDCLXVI), 1666);
assert_eq!(rn!(MMMDCCCLXXXVIII), 3888);
assert_eq!(rn!(MMXIV), 2014);
}
|
main
|
identifier_name
|
roman-numerals-macro.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:roman_numerals.rs
// ignore-stage1
#![feature(phase)]
#[phase(plugin)]
extern crate roman_numerals;
pub fn main()
|
{
assert_eq!(rn!(MMXV), 2015);
assert_eq!(rn!(MCMXCIX), 1999);
assert_eq!(rn!(XXV), 25);
assert_eq!(rn!(MDCLXVI), 1666);
assert_eq!(rn!(MMMDCCCLXXXVIII), 3888);
assert_eq!(rn!(MMXIV), 2014);
}
|
identifier_body
|
|
client.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hash-addressed content resolver & fetcher.
use std::{io, fs};
use std::sync::Arc;
use std::path::PathBuf;
use util::{Mutex, H256, sha3};
use fetch::{Fetch, FetchError, Client as FetchClient};
use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};
/// API for fetching by hash.
pub trait HashFetch {
/// Fetch hash-addressed content.
/// Parameters:
/// 1. `hash` - content hash
/// 2. `on_done` - callback function invoked when the content is ready (or there was an error during fetch)
///
/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>;
}
/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
/// Hash could not be resolved to a valid content address.
NoResolution,
/// Downloaded content hash does not match.
HashMismatch { expected: H256, got: H256 },
/// IO Error while validating hash.
IO(io::Error),
/// Error during fetch.
Fetch(FetchError),
}
impl From<FetchError> for Error {
fn from(error: FetchError) -> Self {
Error::Fetch(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IO(error)
}
}
/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client {
contract: URLHintContract,
fetch: Mutex<FetchClient>,
}
impl Client {
/// Creates new instance of the `Client` given on-chain contract client.
pub fn new(contract: Arc<ContractClient>) -> Self
|
}
impl HashFetch for Client {
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> {
debug!(target: "dapps", "Fetching: {:?}", hash);
let url = try!(
self.contract.resolve(hash.to_vec()).map(|content| match content {
URLHintResult::Dapp(dapp) => {
dapp.url()
},
URLHintResult::Content(content) => {
content.url
},
}).ok_or_else(|| Error::NoResolution)
);
debug!(target: "dapps", "Resolved {:?} to {:?}. Fetching...", hash, url);
self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| {
fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> {
let path = try!(result);
let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
let content_hash = try!(sha3(&mut file_reader));
if content_hash != hash {
Err(Error::HashMismatch{ got: content_hash, expected: hash })
} else {
Ok(path)
}
}
debug!(target: "dapps", "Content fetched, validating hash ({:?})", hash);
on_done(validate_hash(hash, result))
})).map_err(Into::into)
}
}
|
{
Client {
contract: URLHintContract::new(contract),
fetch: Mutex::new(FetchClient::default()),
}
}
|
identifier_body
|
client.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hash-addressed content resolver & fetcher.
use std::{io, fs};
use std::sync::Arc;
use std::path::PathBuf;
use util::{Mutex, H256, sha3};
use fetch::{Fetch, FetchError, Client as FetchClient};
use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};
/// API for fetching by hash.
pub trait HashFetch {
/// Fetch hash-addressed content.
/// Parameters:
/// 1. `hash` - content hash
/// 2. `on_done` - callback function invoked when the content is ready (or there was an error during fetch)
///
/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>;
}
/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
/// Hash could not be resolved to a valid content address.
NoResolution,
/// Downloaded content hash does not match.
HashMismatch { expected: H256, got: H256 },
/// IO Error while validating hash.
IO(io::Error),
/// Error during fetch.
Fetch(FetchError),
}
impl From<FetchError> for Error {
fn from(error: FetchError) -> Self {
Error::Fetch(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IO(error)
}
}
/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client {
contract: URLHintContract,
fetch: Mutex<FetchClient>,
}
impl Client {
/// Creates new instance of the `Client` given on-chain contract client.
pub fn new(contract: Arc<ContractClient>) -> Self {
Client {
contract: URLHintContract::new(contract),
fetch: Mutex::new(FetchClient::default()),
}
}
}
impl HashFetch for Client {
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> {
debug!(target: "dapps", "Fetching: {:?}", hash);
let url = try!(
self.contract.resolve(hash.to_vec()).map(|content| match content {
URLHintResult::Dapp(dapp) => {
dapp.url()
},
URLHintResult::Content(content) => {
content.url
},
}).ok_or_else(|| Error::NoResolution)
);
debug!(target: "dapps", "Resolved {:?} to {:?}. Fetching...", hash, url);
self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| {
fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> {
let path = try!(result);
let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
let content_hash = try!(sha3(&mut file_reader));
if content_hash != hash
|
else {
Ok(path)
}
}
debug!(target: "dapps", "Content fetched, validating hash ({:?})", hash);
on_done(validate_hash(hash, result))
})).map_err(Into::into)
}
}
|
{
Err(Error::HashMismatch{ got: content_hash, expected: hash })
}
|
conditional_block
|
client.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hash-addressed content resolver & fetcher.
use std::{io, fs};
use std::sync::Arc;
use std::path::PathBuf;
use util::{Mutex, H256, sha3};
use fetch::{Fetch, FetchError, Client as FetchClient};
use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};
/// API for fetching by hash.
pub trait HashFetch {
/// Fetch hash-addressed content.
/// Parameters:
/// 1. `hash` - content hash
/// 2. `on_done` - callback function invoked when the content is ready (or there was an error during fetch)
///
/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>;
}
/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
/// Hash could not be resolved to a valid content address.
NoResolution,
/// Downloaded content hash does not match.
HashMismatch { expected: H256, got: H256 },
/// IO Error while validating hash.
IO(io::Error),
/// Error during fetch.
Fetch(FetchError),
}
impl From<FetchError> for Error {
fn from(error: FetchError) -> Self {
Error::Fetch(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IO(error)
}
}
/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client {
contract: URLHintContract,
fetch: Mutex<FetchClient>,
}
impl Client {
/// Creates new instance of the `Client` given on-chain contract client.
pub fn new(contract: Arc<ContractClient>) -> Self {
Client {
contract: URLHintContract::new(contract),
fetch: Mutex::new(FetchClient::default()),
}
}
}
impl HashFetch for Client {
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> {
debug!(target: "dapps", "Fetching: {:?}", hash);
let url = try!(
self.contract.resolve(hash.to_vec()).map(|content| match content {
URLHintResult::Dapp(dapp) => {
dapp.url()
|
);
debug!(target: "dapps", "Resolved {:?} to {:?}. Fetching...", hash, url);
self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| {
fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> {
let path = try!(result);
let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
let content_hash = try!(sha3(&mut file_reader));
if content_hash != hash {
Err(Error::HashMismatch{ got: content_hash, expected: hash })
} else {
Ok(path)
}
}
debug!(target: "dapps", "Content fetched, validating hash ({:?})", hash);
on_done(validate_hash(hash, result))
})).map_err(Into::into)
}
}
|
},
URLHintResult::Content(content) => {
content.url
},
}).ok_or_else(|| Error::NoResolution)
|
random_line_split
|
client.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hash-addressed content resolver & fetcher.
use std::{io, fs};
use std::sync::Arc;
use std::path::PathBuf;
use util::{Mutex, H256, sha3};
use fetch::{Fetch, FetchError, Client as FetchClient};
use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};
/// API for fetching by hash.
pub trait HashFetch {
/// Fetch hash-addressed content.
/// Parameters:
/// 1. `hash` - content hash
/// 2. `on_done` - callback function invoked when the content is ready (or there was an error during fetch)
///
/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>;
}
/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
/// Hash could not be resolved to a valid content address.
NoResolution,
/// Downloaded content hash does not match.
HashMismatch { expected: H256, got: H256 },
/// IO Error while validating hash.
IO(io::Error),
/// Error during fetch.
Fetch(FetchError),
}
impl From<FetchError> for Error {
fn
|
(error: FetchError) -> Self {
Error::Fetch(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::IO(error)
}
}
/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client {
contract: URLHintContract,
fetch: Mutex<FetchClient>,
}
impl Client {
/// Creates a new instance of the `Client` given an on-chain contract client.
pub fn new(contract: Arc<ContractClient>) -> Self {
Client {
contract: URLHintContract::new(contract),
fetch: Mutex::new(FetchClient::default()),
}
}
}
impl HashFetch for Client {
fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> {
debug!(target: "dapps", "Fetching: {:?}", hash);
let url = try!(
self.contract.resolve(hash.to_vec()).map(|content| match content {
URLHintResult::Dapp(dapp) => {
dapp.url()
},
URLHintResult::Content(content) => {
content.url
},
}).ok_or_else(|| Error::NoResolution)
);
debug!(target: "dapps", "Resolved {:?} to {:?}. Fetching...", hash, url);
self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| {
fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> {
let path = try!(result);
let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
let content_hash = try!(sha3(&mut file_reader));
if content_hash != hash {
Err(Error::HashMismatch{ got: content_hash, expected: hash })
} else {
Ok(path)
}
}
debug!(target: "dapps", "Content fetched, validating hash ({:?})", hash);
on_done(validate_hash(hash, result))
})).map_err(Into::into)
}
}
|
from
|
identifier_name
|
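The two `From` impls in the `client.rs` code above are what allow `try!` to propagate `io::Error` and `FetchError` values as `Error` without explicit matching. A minimal sketch assuming only the `Error` enum defined above; the `open_for_validation` helper is hypothetical and not part of the original source.

use std::fs;
use std::path::Path;

// Hypothetical helper: on failure, try! converts the io::Error into Error::IO via From.
fn open_for_validation(path: &Path) -> Result<fs::File, Error> {
    let file = try!(fs::File::open(path));
    Ok(file)
}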
set_mat_col.rs
|
//! Given:
//!
//! `y <- alpha * A * x`
//!
//! Test that
//!
//! `y[i] = alpha * A[i, :] * x`
//!
//! for any valid `i`
#![feature(custom_attribute)]
#![feature(plugin)]
#![plugin(quickcheck_macros)]
extern crate approx;
extern crate complex;
extern crate linalg;
extern crate quickcheck;
extern crate rand;
#[macro_use]
mod setup;
use complex::{c64, c128};
|
use complex::{c64, c128};
use linalg::prelude::*;
use quickcheck::TestResult;
macro_rules! tests {
($($t:ident),+) => {
$(
#[quickcheck]
fn $t((srow, scol): (u32, u32), (nrows, ncols): (u32, u32), i: u32) -> TestResult {
enforce! {
i < nrows,
ncols != 0,
}
let a = ::setup::rand::mat((srow + ncols, scol + nrows));
let a = a.slice((srow.., scol..)).t();
let ref x = ::setup::rand::col(ncols);
let alpha: $t = ::setup::rand::scalar();
let mut y = ::setup::rand::col(nrows);
y.set(alpha * a * x);
test_approx_eq!(y[i], alpha * a.row(i) * x)
}
)+
}
}
tests!(f32, f64, c64, c128);
}
macro_rules! tests {
($($t:ident),+) => {
$(
#[quickcheck]
fn $t((srow, scol): (u32, u32), (nrows, ncols): (u32, u32), i: u32) -> TestResult {
enforce! {
i < nrows,
ncols != 0,
}
let a = ::setup::rand::mat((srow + nrows, scol + ncols));
let a = a.slice((srow.., scol..));
let ref x = ::setup::rand::col(ncols);
let alpha: $t = ::setup::rand::scalar();
let mut y = ::setup::rand::col(nrows);
y.set(alpha * a * x);
test_approx_eq!(y[i], alpha * a.row(i) * x)
}
)+
}
}
tests!(f32, f64, c64, c128);
|
use linalg::prelude::*;
use quickcheck::TestResult;
mod transposed {
|
random_line_split
|
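The quickcheck tests above assert that after `y.set(alpha * a * x)`, every entry satisfies `y[i] = alpha * A[i, :] * x`. A minimal sketch of that row property using plain slices instead of the `linalg` types (the function name is illustrative only):

// alpha * dot(A[i, :], x), the expected value of y[i].
fn scaled_row_dot(a_row: &[f64], x: &[f64], alpha: f64) -> f64 {
    alpha * a_row.iter().zip(x).map(|(aij, xj)| aij * xj).sum::<f64>()
}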
item-attributes.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These are attributes of the implicit crate. Really this just needs to parse
// for completeness since .rs files linked from .rc files support this
// notation to specify their module's attributes
#![allow(unused_attribute)]
#![attr1 = "val"]
#![attr2 = "val"]
#![attr3]
#![attr4(attr5)]
#![crate_id="foobar#0.1"]
// These are attributes of the following mod
#[attr1 = "val"]
#[attr2 = "val"]
mod test_first_item_in_file_mod {}
mod test_single_attr_outer {
#[attr = "val"]
pub static x: int = 10;
#[attr = "val"]
pub fn f() { }
#[attr = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr = "val"]
extern {}
}
}
mod test_multi_attr_outer {
#[attr1 = "val"]
#[attr2 = "val"]
pub static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
pub fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {}
}
#[attr1 = "val"]
#[attr2 = "val"]
struct t {x: int}
}
mod test_stmt_single_attr_outer {
pub fn f() {
#[attr = "val"]
static x: int = 10;
#[attr = "val"]
fn f() { }
#[attr = "val"]
mod mod1 {
}
mod rustrt {
#[attr = "val"]
extern {
}
}
}
}
|
mod test_stmt_multi_attr_outer {
pub fn f() {
#[attr1 = "val"]
#[attr2 = "val"]
static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
mod mod1 {
}
mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {
}
}
}
}
mod test_attr_inner {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
}
}
mod test_attr_inner_then_outer {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
// This is an attribute of fn f
#[attr = "val"]
fn f() { }
}
}
mod test_attr_inner_then_outer_multi {
pub mod m {
// This is an attribute of mod m
#![attr1 = "val"]
#![attr2 = "val"]
// This is an attribute of fn f
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
}
}
mod test_distinguish_syntax_ext {
pub fn f() {
format!("test{}", "s");
#[attr = "val"]
fn g() { }
}
}
mod test_other_forms {
#[attr]
#[attr(word)]
#[attr(attr(word))]
#[attr(key1 = "val", key2 = "val", attr)]
pub fn f() { }
}
mod test_foreign_items {
pub mod rustrt {
extern crate libc;
extern {
#![attr]
#[attr]
fn rust_get_test_int() -> libc::intptr_t;
}
}
}
// FIXME #623 - these aren't supported yet
/*mod test_literals {
#![str = "s"]
#![char = 'c']
#![int = 100]
#![uint = 100u]
#![mach_int = 100u32]
#![float = 1.0]
#![mach_float = 1.0f32]
#![nil = ()]
#![bool = true]
mod m {}
}*/
fn test_fn_inner() {
#![inner_fn_attr]
}
pub fn main() { }
|
random_line_split
|
|
item-attributes.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These are attributes of the implicit crate. Really this just needs to parse
// for completeness since .rs files linked from .rc files support this
// notation to specify their module's attributes
#![allow(unused_attribute)]
#![attr1 = "val"]
#![attr2 = "val"]
#![attr3]
#![attr4(attr5)]
#![crate_id="foobar#0.1"]
// These are attributes of the following mod
#[attr1 = "val"]
#[attr2 = "val"]
mod test_first_item_in_file_mod {}
mod test_single_attr_outer {
#[attr = "val"]
pub static x: int = 10;
#[attr = "val"]
pub fn f() { }
#[attr = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr = "val"]
extern {}
}
}
mod test_multi_attr_outer {
#[attr1 = "val"]
#[attr2 = "val"]
pub static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
pub fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {}
}
#[attr1 = "val"]
#[attr2 = "val"]
struct t {x: int}
}
mod test_stmt_single_attr_outer {
pub fn f() {
#[attr = "val"]
static x: int = 10;
#[attr = "val"]
fn f()
|
#[attr = "val"]
mod mod1 {
}
mod rustrt {
#[attr = "val"]
extern {
}
}
}
}
mod test_stmt_multi_attr_outer {
pub fn f() {
#[attr1 = "val"]
#[attr2 = "val"]
static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
mod mod1 {
}
mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {
}
}
}
}
mod test_attr_inner {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
}
}
mod test_attr_inner_then_outer {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
// This is an attribute of fn f
#[attr = "val"]
fn f() { }
}
}
mod test_attr_inner_then_outer_multi {
pub mod m {
// This is an attribute of mod m
#![attr1 = "val"]
#![attr2 = "val"]
// This is an attribute of fn f
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
}
}
mod test_distinguish_syntax_ext {
pub fn f() {
format!("test{}", "s");
#[attr = "val"]
fn g() { }
}
}
mod test_other_forms {
#[attr]
#[attr(word)]
#[attr(attr(word))]
#[attr(key1 = "val", key2 = "val", attr)]
pub fn f() { }
}
mod test_foreign_items {
pub mod rustrt {
extern crate libc;
extern {
#![attr]
#[attr]
fn rust_get_test_int() -> libc::intptr_t;
}
}
}
// FIXME #623 - these aren't supported yet
/*mod test_literals {
#![str = "s"]
#![char = 'c']
#![int = 100]
#![uint = 100u]
#![mach_int = 100u32]
#![float = 1.0]
#![mach_float = 1.0f32]
#![nil = ()]
#![bool = true]
mod m {}
}*/
fn test_fn_inner() {
#![inner_fn_attr]
}
pub fn main() { }
|
{ }
|
identifier_body
|
item-attributes.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// These are attributes of the implicit crate. Really this just needs to parse
// for completeness since .rs files linked from .rc files support this
// notation to specify their module's attributes
#![allow(unused_attribute)]
#![attr1 = "val"]
#![attr2 = "val"]
#![attr3]
#![attr4(attr5)]
#![crate_id="foobar#0.1"]
// These are attributes of the following mod
#[attr1 = "val"]
#[attr2 = "val"]
mod test_first_item_in_file_mod {}
mod test_single_attr_outer {
#[attr = "val"]
pub static x: int = 10;
#[attr = "val"]
pub fn f() { }
#[attr = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr = "val"]
extern {}
}
}
mod test_multi_attr_outer {
#[attr1 = "val"]
#[attr2 = "val"]
pub static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
pub fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
pub mod mod1 {}
pub mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {}
}
#[attr1 = "val"]
#[attr2 = "val"]
struct t {x: int}
}
mod test_stmt_single_attr_outer {
pub fn f() {
#[attr = "val"]
static x: int = 10;
#[attr = "val"]
fn
|
() { }
#[attr = "val"]
mod mod1 {
}
mod rustrt {
#[attr = "val"]
extern {
}
}
}
}
mod test_stmt_multi_attr_outer {
pub fn f() {
#[attr1 = "val"]
#[attr2 = "val"]
static x: int = 10;
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
#[attr1 = "val"]
#[attr2 = "val"]
mod mod1 {
}
mod rustrt {
#[attr1 = "val"]
#[attr2 = "val"]
extern {
}
}
}
}
mod test_attr_inner {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
}
}
mod test_attr_inner_then_outer {
pub mod m {
// This is an attribute of mod m
#![attr = "val"]
// This is an attribute of fn f
#[attr = "val"]
fn f() { }
}
}
mod test_attr_inner_then_outer_multi {
pub mod m {
// This is an attribute of mod m
#![attr1 = "val"]
#![attr2 = "val"]
// This is an attribute of fn f
#[attr1 = "val"]
#[attr2 = "val"]
fn f() { }
}
}
mod test_distinguish_syntax_ext {
pub fn f() {
format!("test{}", "s");
#[attr = "val"]
fn g() { }
}
}
mod test_other_forms {
#[attr]
#[attr(word)]
#[attr(attr(word))]
#[attr(key1 = "val", key2 = "val", attr)]
pub fn f() { }
}
mod test_foreign_items {
pub mod rustrt {
extern crate libc;
extern {
#![attr]
#[attr]
fn rust_get_test_int() -> libc::intptr_t;
}
}
}
// FIXME #623 - these aren't supported yet
/*mod test_literals {
#![str = "s"]
#![char = 'c']
#![int = 100]
#![uint = 100u]
#![mach_int = 100u32]
#![float = 1.0]
#![mach_float = 1.0f32]
#![nil = ()]
#![bool = true]
mod m {}
}*/
fn test_fn_inner() {
#![inner_fn_attr]
}
pub fn main() { }
|
f
|
identifier_name
|
main.rs
|
extern crate rustbox;
use std::env;
use std::io::Result;
use std::path::Path;
mod filedata;
use filedata::FileData;
mod display;
use display::Display;
fn main()
{
let fd = match init_data()
{
Ok(v) => v,
Err(e) => {
println!("could not open specified file: {}", e);
return;
}
};
let display = Display::new(fd);
display.run();
}
// create a new data object from cmd args
fn init_data() -> Result<FileData>
|
}
|
{
let mut args = env::args().skip(1);
if args.len() > 0
{
let path_str = args.next().unwrap_or(String::new());
let path_str_copy = path_str.clone();
let path = Path::new(&path_str_copy);
if path.exists() == false
{
Ok(FileData::new_with_name(path_str))
}
else
{
FileData::from(path_str)
}
}
else
{
Ok(FileData::new())
}
|
identifier_body
|
main.rs
|
extern crate rustbox;
use std::env;
use std::io::Result;
use std::path::Path;
mod filedata;
use filedata::FileData;
mod display;
use display::Display;
fn main()
{
let fd = match init_data()
{
Ok(v) => v,
Err(e) =>
|
};
let display = Display::new(fd);
display.run();
}
// create a new data object from cmd args
fn init_data() -> Result<FileData>
{
let mut args = env::args().skip(1);
if args.len() > 0
{
let path_str = args.next().unwrap_or(String::new());
let path_str_copy = path_str.clone();
let path = Path::new(&path_str_copy);
if path.exists() == false
{
Ok(FileData::new_with_name(path_str))
}
else
{
FileData::from(path_str)
}
}
else
{
Ok(FileData::new())
}
}
|
{
println!("could not open specified file: {}", e);
return;
}
|
conditional_block
|
main.rs
|
extern crate rustbox;
use std::env;
use std::io::Result;
use std::path::Path;
mod filedata;
use filedata::FileData;
|
mod display;
use display::Display;
fn main()
{
let fd = match init_data()
{
Ok(v) => v,
Err(e) => {
println!("could not open specified file: {}", e);
return;
}
};
let display = Display::new(fd);
display.run();
}
// create a new data object from cmd args
fn init_data() -> Result<FileData>
{
let mut args = env::args().skip(1);
if args.len() > 0
{
let path_str = args.next().unwrap_or(String::new());
let path_str_copy = path_str.clone();
let path = Path::new(&path_str_copy);
if path.exists() == false
{
Ok(FileData::new_with_name(path_str))
}
else
{
FileData::from(path_str)
}
}
else
{
Ok(FileData::new())
}
}
|
random_line_split
|
|
main.rs
|
extern crate rustbox;
use std::env;
use std::io::Result;
use std::path::Path;
mod filedata;
use filedata::FileData;
mod display;
use display::Display;
fn main()
{
let fd = match init_data()
{
Ok(v) => v,
Err(e) => {
println!("could not open specified file: {}", e);
return;
}
};
let display = Display::new(fd);
display.run();
}
// create a new data object from cmd args
fn
|
() -> Result<FileData>
{
let mut args = env::args().skip(1);
if args.len() > 0
{
let path_str = args.next().unwrap_or(String::new());
let path_str_copy = path_str.clone();
let path = Path::new(&path_str_copy);
if path.exists() == false
{
Ok(FileData::new_with_name(path_str))
}
else
{
FileData::from(path_str)
}
}
else
{
Ok(FileData::new())
}
}
|
init_data
|
identifier_name
|
ptr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use bindings::*;
use heapsize::HeapSizeOf;
use std::fmt::{self, Debug};
// Defines an Arc-like type that manages a refcounted Gecko object stored
// in a ThreadSafeFooHolder smart pointer. Used in tandem with the
// NS_DECL_HOLDER_FFI_REFCOUNTING-defined types and functions in Gecko.
macro_rules! define_holder_arc {
($arc_type:ident, $name:ident, $holder_type:ident, $addref: ident, $release: ident) => (
#[derive(PartialEq)]
pub struct $arc_type {
ptr: *mut $holder_type,
}
|
ptr: data
}
}
pub fn as_raw(&self) -> *mut $holder_type { self.ptr }
}
unsafe impl Send for $arc_type {}
unsafe impl Sync for $arc_type {}
impl Clone for $arc_type {
fn clone(&self) -> $arc_type {
$arc_type::new(self.ptr)
}
}
impl Drop for $arc_type {
fn drop(&mut self) {
unsafe { $release(self.ptr); }
}
}
impl HeapSizeOf for $arc_type {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl Debug for $arc_type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, stringify!($name))
}
}
)
}
define_holder_arc!(GeckoArcPrincipal, Principal, ThreadSafePrincipalHolder,
Gecko_AddRefPrincipalArbitraryThread, Gecko_ReleasePrincipalArbitraryThread);
define_holder_arc!(GeckoArcURI, URI, ThreadSafeURIHolder,
Gecko_AddRefURIArbitraryThread, Gecko_ReleaseURIArbitraryThread);
|
impl $arc_type {
pub fn new(data: *mut $holder_type) -> $arc_type {
debug_assert!(!data.is_null());
unsafe { $addref(data); }
$arc_type {
|
random_line_split
|
write_vectored.rs
|
use crate::io::AsyncWrite;
use futures_core::future::Future;
use futures_core::task::{Context, Poll};
use std::io::{self, IoSlice};
use std::pin::Pin;
/// Future for the [`write_vectored`](super::AsyncWriteExt::write_vectored) method.
#[derive(Debug)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct
|
<'a, 'b, W: ?Sized> {
writer: &'a mut W,
bufs: &'a [IoSlice<'b>],
}
impl<W: ?Sized + Unpin> Unpin for WriteVectored<'_, '_, W> {}
impl<'a, 'b, W: AsyncWrite + ?Sized + Unpin> WriteVectored<'a, 'b, W> {
pub(super) fn new(writer: &'a mut W, bufs: &'a [IoSlice<'b>]) -> Self {
Self { writer, bufs }
}
}
impl<W: AsyncWrite + ?Sized + Unpin> Future for WriteVectored<'_, '_, W> {
type Output = io::Result<usize>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = &mut *self;
Pin::new(&mut this.writer).poll_write_vectored(cx, this.bufs)
}
}
|
WriteVectored
|
identifier_name
|
write_vectored.rs
|
use crate::io::AsyncWrite;
use futures_core::future::Future;
use futures_core::task::{Context, Poll};
use std::io::{self, IoSlice};
use std::pin::Pin;
/// Future for the [`write_vectored`](super::AsyncWriteExt::write_vectored) method.
#[derive(Debug)]
|
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct WriteVectored<'a, 'b, W: ?Sized> {
writer: &'a mut W,
bufs: &'a [IoSlice<'b>],
}
impl<W: ?Sized + Unpin> Unpin for WriteVectored<'_, '_, W> {}
impl<'a, 'b, W: AsyncWrite + ?Sized + Unpin> WriteVectored<'a, 'b, W> {
pub(super) fn new(writer: &'a mut W, bufs: &'a [IoSlice<'b>]) -> Self {
Self { writer, bufs }
}
}
impl<W: AsyncWrite + ?Sized + Unpin> Future for WriteVectored<'_, '_, W> {
type Output = io::Result<usize>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = &mut *self;
Pin::new(&mut this.writer).poll_write_vectored(cx, this.bufs)
}
}
|
random_line_split
|
|
write_vectored.rs
|
use crate::io::AsyncWrite;
use futures_core::future::Future;
use futures_core::task::{Context, Poll};
use std::io::{self, IoSlice};
use std::pin::Pin;
/// Future for the [`write_vectored`](super::AsyncWriteExt::write_vectored) method.
#[derive(Debug)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct WriteVectored<'a, 'b, W: ?Sized> {
writer: &'a mut W,
bufs: &'a [IoSlice<'b>],
}
impl<W: ?Sized + Unpin> Unpin for WriteVectored<'_, '_, W> {}
impl<'a, 'b, W: AsyncWrite + ?Sized + Unpin> WriteVectored<'a, 'b, W> {
pub(super) fn new(writer: &'a mut W, bufs: &'a [IoSlice<'b>]) -> Self
|
}
impl<W: AsyncWrite + ?Sized + Unpin> Future for WriteVectored<'_, '_, W> {
type Output = io::Result<usize>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = &mut *self;
Pin::new(&mut this.writer).poll_write_vectored(cx, this.bufs)
}
}
|
{
Self { writer, bufs }
}
|
identifier_body
|
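In normal use the `WriteVectored` future above is constructed through `AsyncWriteExt::write_vectored` and awaited rather than built directly. A hedged usage sketch assuming the `futures` crate's `Cursor`, `AsyncWriteExt`, and `block_on`; it is illustrative and not taken from the crate's documentation.

use futures::executor::block_on;
use futures::io::{AsyncWriteExt, Cursor};
use std::io::IoSlice;

fn demo() -> std::io::Result<usize> {
    block_on(async {
        let mut writer = Cursor::new(Vec::new());
        let bufs = [IoSlice::new(b"hello "), IoSlice::new(b"world")];
        // Drives poll_write_vectored on the writer; may write fewer bytes than supplied.
        writer.write_vectored(&bufs).await
    })
}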
htmlbodyelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers};
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::HTMLBodyElementBinding::{self, HTMLBodyElementMethods};
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::codegen::InheritTypes::{HTMLBodyElementDerived, HTMLElementCast};
use dom::bindings::js::{JSRef, Rootable, Temporary};
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::element::ElementTypeId;
use dom::eventtarget::{EventTarget, EventTargetTypeId, EventTargetHelpers};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use cssparser::RGBA;
use string_cache::Atom;
use util::str::{self, DOMString};
use std::borrow::ToOwned;
use std::cell::Cell;
#[dom_struct]
pub struct HTMLBodyElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLBodyElementDerived for EventTarget {
fn is_htmlbodyelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)))
}
}
impl HTMLBodyElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> HTMLBodyElement {
HTMLBodyElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLBodyElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> Temporary<HTMLBodyElement> {
let element = HTMLBodyElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBodyElementBinding::Wrap)
}
}
impl<'a> HTMLBodyElementMethods for JSRef<'a, HTMLBodyElement> {
// https://html.spec.whatwg.org/#dom-body-bgcolor
make_getter!(BgColor, "bgcolor");
make_setter!(SetBgColor, "bgcolor");
fn GetOnunload(self) -> Option<EventHandlerNonNull> {
let win = window_from_node(self).root();
win.r().GetOnunload()
}
fn SetOnunload(self, listener: Option<EventHandlerNonNull>)
|
}
pub trait HTMLBodyElementHelpers {
fn get_background_color(&self) -> Option<RGBA>;
}
impl HTMLBodyElementHelpers for HTMLBodyElement {
fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLBodyElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let element: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(element as &VirtualMethods)
}
fn after_set_attr(&self, attr: JSRef<Attr>) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
let name = attr.local_name();
if name.starts_with("on") {
static FORWARDED_EVENTS: &'static [&'static str] =
&["onfocus", "onload", "onscroll", "onafterprint", "onbeforeprint",
"onbeforeunload", "onhashchange", "onlanguagechange", "onmessage",
"onoffline", "ononline", "onpagehide", "onpageshow", "onpopstate",
"onstorage", "onresize", "onunload", "onerror"];
let window = window_from_node(*self).root();
let (cx, url, reflector) = (window.r().get_cx(),
window.r().get_url(),
window.r().reflector().get_jsobject());
let evtarget: JSRef<EventTarget> =
if FORWARDED_EVENTS.iter().any(|&event| &**name == event) {
EventTargetCast::from_ref(window.r())
} else {
EventTargetCast::from_ref(*self)
};
evtarget.set_event_handler_uncompiled(cx, url, reflector,
&name[2..],
(**attr.value()).to_owned());
}
match attr.local_name() {
&atom!("bgcolor") => {
self.background_color.set(str::parse_legacy_color(&attr.value()).ok())
}
_ => {}
}
}
fn before_remove_attr(&self, attr: JSRef<Attr>) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(attr),
_ => {}
}
match attr.local_name() {
&atom!("bgcolor") => self.background_color.set(None),
_ => {}
}
}
}
|
{
let win = window_from_node(self).root();
win.r().SetOnunload(listener)
}
|
identifier_body
|
htmlbodyelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers};
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::HTMLBodyElementBinding::{self, HTMLBodyElementMethods};
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::codegen::InheritTypes::{HTMLBodyElementDerived, HTMLElementCast};
use dom::bindings::js::{JSRef, Rootable, Temporary};
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::element::ElementTypeId;
use dom::eventtarget::{EventTarget, EventTargetTypeId, EventTargetHelpers};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use cssparser::RGBA;
use string_cache::Atom;
use util::str::{self, DOMString};
use std::borrow::ToOwned;
use std::cell::Cell;
#[dom_struct]
pub struct HTMLBodyElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLBodyElementDerived for EventTarget {
fn is_htmlbodyelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)))
}
}
impl HTMLBodyElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> HTMLBodyElement {
HTMLBodyElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLBodyElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> Temporary<HTMLBodyElement> {
let element = HTMLBodyElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBodyElementBinding::Wrap)
}
}
impl<'a> HTMLBodyElementMethods for JSRef<'a, HTMLBodyElement> {
// https://html.spec.whatwg.org/#dom-body-bgcolor
make_getter!(BgColor, "bgcolor");
make_setter!(SetBgColor, "bgcolor");
fn GetOnunload(self) -> Option<EventHandlerNonNull> {
let win = window_from_node(self).root();
win.r().GetOnunload()
}
fn SetOnunload(self, listener: Option<EventHandlerNonNull>) {
let win = window_from_node(self).root();
win.r().SetOnunload(listener)
}
}
pub trait HTMLBodyElementHelpers {
fn get_background_color(&self) -> Option<RGBA>;
}
impl HTMLBodyElementHelpers for HTMLBodyElement {
fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLBodyElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let element: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(element as &VirtualMethods)
}
fn after_set_attr(&self, attr: JSRef<Attr>) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
let name = attr.local_name();
if name.starts_with("on") {
static FORWARDED_EVENTS: &'static [&'static str] =
&["onfocus", "onload", "onscroll", "onafterprint", "onbeforeprint",
"onbeforeunload", "onhashchange", "onlanguagechange", "onmessage",
"onoffline", "ononline", "onpagehide", "onpageshow", "onpopstate",
"onstorage", "onresize", "onunload", "onerror"];
let window = window_from_node(*self).root();
let (cx, url, reflector) = (window.r().get_cx(),
window.r().get_url(),
window.r().reflector().get_jsobject());
let evtarget: JSRef<EventTarget> =
if FORWARDED_EVENTS.iter().any(|&event| &**name == event) {
EventTargetCast::from_ref(window.r())
} else {
EventTargetCast::from_ref(*self)
};
evtarget.set_event_handler_uncompiled(cx, url, reflector,
&name[2..],
(**attr.value()).to_owned());
}
match attr.local_name() {
&atom!("bgcolor") => {
self.background_color.set(str::parse_legacy_color(&attr.value()).ok())
}
_ => {}
}
}
fn
|
(&self, attr: JSRef<Attr>) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(attr),
_ => {}
}
match attr.local_name() {
&atom!("bgcolor") => self.background_color.set(None),
_ => {}
}
}
}
|
before_remove_attr
|
identifier_name
|
htmlbodyelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers};
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::HTMLBodyElementBinding::{self, HTMLBodyElementMethods};
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::codegen::InheritTypes::{HTMLBodyElementDerived, HTMLElementCast};
use dom::bindings::js::{JSRef, Rootable, Temporary};
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::element::ElementTypeId;
use dom::eventtarget::{EventTarget, EventTargetTypeId, EventTargetHelpers};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use cssparser::RGBA;
use string_cache::Atom;
use util::str::{self, DOMString};
use std::borrow::ToOwned;
use std::cell::Cell;
#[dom_struct]
pub struct HTMLBodyElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLBodyElementDerived for EventTarget {
fn is_htmlbodyelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)))
}
}
impl HTMLBodyElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> HTMLBodyElement {
HTMLBodyElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLBodyElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> Temporary<HTMLBodyElement> {
let element = HTMLBodyElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBodyElementBinding::Wrap)
}
}
impl<'a> HTMLBodyElementMethods for JSRef<'a, HTMLBodyElement> {
// https://html.spec.whatwg.org/#dom-body-bgcolor
make_getter!(BgColor, "bgcolor");
make_setter!(SetBgColor, "bgcolor");
fn GetOnunload(self) -> Option<EventHandlerNonNull> {
let win = window_from_node(self).root();
win.r().GetOnunload()
}
fn SetOnunload(self, listener: Option<EventHandlerNonNull>) {
let win = window_from_node(self).root();
win.r().SetOnunload(listener)
}
}
pub trait HTMLBodyElementHelpers {
fn get_background_color(&self) -> Option<RGBA>;
}
impl HTMLBodyElementHelpers for HTMLBodyElement {
fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLBodyElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let element: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(element as &VirtualMethods)
}
fn after_set_attr(&self, attr: JSRef<Attr>) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
let name = attr.local_name();
if name.starts_with("on") {
static FORWARDED_EVENTS: &'static [&'static str] =
&["onfocus", "onload", "onscroll", "onafterprint", "onbeforeprint",
"onbeforeunload", "onhashchange", "onlanguagechange", "onmessage",
"onoffline", "ononline", "onpagehide", "onpageshow", "onpopstate",
"onstorage", "onresize", "onunload", "onerror"];
let window = window_from_node(*self).root();
let (cx, url, reflector) = (window.r().get_cx(),
|
window.r().reflector().get_jsobject());
let evtarget: JSRef<EventTarget> =
if FORWARDED_EVENTS.iter().any(|&event| &**name == event) {
EventTargetCast::from_ref(window.r())
} else {
EventTargetCast::from_ref(*self)
};
evtarget.set_event_handler_uncompiled(cx, url, reflector,
&name[2..],
(**attr.value()).to_owned());
}
match attr.local_name() {
&atom!("bgcolor") => {
self.background_color.set(str::parse_legacy_color(&attr.value()).ok())
}
_ => {}
}
}
fn before_remove_attr(&self, attr: JSRef<Attr>) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(attr),
_ => {}
}
match attr.local_name() {
&atom!("bgcolor") => self.background_color.set(None),
_ => {}
}
}
}
|
window.r().get_url(),
|
random_line_split
|
htmlbodyelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers};
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::HTMLBodyElementBinding::{self, HTMLBodyElementMethods};
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::InheritTypes::EventTargetCast;
use dom::bindings::codegen::InheritTypes::{HTMLBodyElementDerived, HTMLElementCast};
use dom::bindings::js::{JSRef, Rootable, Temporary};
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::element::ElementTypeId;
use dom::eventtarget::{EventTarget, EventTargetTypeId, EventTargetHelpers};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::window::WindowHelpers;
use cssparser::RGBA;
use string_cache::Atom;
use util::str::{self, DOMString};
use std::borrow::ToOwned;
use std::cell::Cell;
#[dom_struct]
pub struct HTMLBodyElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLBodyElementDerived for EventTarget {
fn is_htmlbodyelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)))
}
}
impl HTMLBodyElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> HTMLBodyElement {
HTMLBodyElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLBodyElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)
-> Temporary<HTMLBodyElement> {
let element = HTMLBodyElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLBodyElementBinding::Wrap)
}
}
impl<'a> HTMLBodyElementMethods for JSRef<'a, HTMLBodyElement> {
// https://html.spec.whatwg.org/#dom-body-bgcolor
make_getter!(BgColor, "bgcolor");
make_setter!(SetBgColor, "bgcolor");
fn GetOnunload(self) -> Option<EventHandlerNonNull> {
let win = window_from_node(self).root();
win.r().GetOnunload()
}
fn SetOnunload(self, listener: Option<EventHandlerNonNull>) {
let win = window_from_node(self).root();
win.r().SetOnunload(listener)
}
}
pub trait HTMLBodyElementHelpers {
fn get_background_color(&self) -> Option<RGBA>;
}
impl HTMLBodyElementHelpers for HTMLBodyElement {
fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLBodyElement> {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let element: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(element as &VirtualMethods)
}
fn after_set_attr(&self, attr: JSRef<Attr>) {
if let Some(ref s) = self.super_type() {
s.after_set_attr(attr);
}
let name = attr.local_name();
if name.starts_with("on") {
static FORWARDED_EVENTS: &'static [&'static str] =
&["onfocus", "onload", "onscroll", "onafterprint", "onbeforeprint",
"onbeforeunload", "onhashchange", "onlanguagechange", "onmessage",
"onoffline", "ononline", "onpagehide", "onpageshow", "onpopstate",
"onstorage", "onresize", "onunload", "onerror"];
let window = window_from_node(*self).root();
let (cx, url, reflector) = (window.r().get_cx(),
window.r().get_url(),
window.r().reflector().get_jsobject());
let evtarget: JSRef<EventTarget> =
if FORWARDED_EVENTS.iter().any(|&event| &**name == event) {
EventTargetCast::from_ref(window.r())
} else
|
;
evtarget.set_event_handler_uncompiled(cx, url, reflector,
&name[2..],
(**attr.value()).to_owned());
}
match attr.local_name() {
&atom!("bgcolor") => {
self.background_color.set(str::parse_legacy_color(&attr.value()).ok())
}
_ => {}
}
}
fn before_remove_attr(&self, attr: JSRef<Attr>) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(attr),
_ => {}
}
match attr.local_name() {
&atom!("bgcolor") => self.background_color.set(None),
_ => {}
}
}
}
|
{
EventTargetCast::from_ref(*self)
}
|
conditional_block
|
prng.rs
|
use core::usize;
use rng::Rng;
// https://en.wikipedia.org/wiki/Linear_congruential_generator#Parameters_in_common_use
#[cfg(target_pointer_width = "32")]
pub const OFFSET: usize = 1013904223;
#[cfg(target_pointer_width = "32")]
pub const MULTIPLIER: usize = 1664525;
#[cfg(target_pointer_width = "64")]
pub const OFFSET: usize = 1442695040888963407;
#[cfg(target_pointer_width = "64")]
pub const MULTIPLIER: usize = 6364136223846793005;
pub const MAX: usize = usize::MAX as usize;
pub struct Prng {
seed: usize,
}
impl Prng {
#[inline]
pub fn new() -> Self {
Prng {
// derive a cheap, non-cryptographic initial seed from a memory address
seed: &false as *const _ as usize,
}
}
#[inline]
pub fn seed(&self) -> usize { self.seed }
#[inline]
pub fn
|
(&mut self, seed: usize) {
self.seed = seed;
}
}
impl Rng for Prng {
// http://indiegamr.com/generate-repeatable-random-numbers-in-js/
#[inline]
fn next(&mut self) -> usize {
self.seed = ((MULTIPLIER.wrapping_mul(self.seed)).wrapping_add(OFFSET)) % MAX;
self.seed
}
}
|
set_seed
|
identifier_name
|
prng.rs
|
use core::usize;
use rng::Rng;
// https://en.wikipedia.org/wiki/Linear_congruential_generator#Parameters_in_common_use
#[cfg(target_pointer_width = "32")]
pub const OFFSET: usize = 1013904223;
#[cfg(target_pointer_width = "32")]
pub const MULTIPLIER: usize = 1664525;
#[cfg(target_pointer_width = "64")]
pub const OFFSET: usize = 1442695040888963407;
#[cfg(target_pointer_width = "64")]
pub const MULTIPLIER: usize = 6364136223846793005;
pub const MAX: usize = usize::MAX as usize;
pub struct Prng {
seed: usize,
}
impl Prng {
#[inline]
pub fn new() -> Self {
Prng {
// derive a cheap, non-cryptographic initial seed from a memory address
seed: &false as *const _ as usize,
}
}
#[inline]
pub fn seed(&self) -> usize { self.seed }
#[inline]
pub fn set_seed(&mut self, seed: usize)
|
}
impl Rng for Prng {
// http://indiegamr.com/generate-repeatable-random-numbers-in-js/
#[inline]
fn next(&mut self) -> usize {
self.seed = ((MULTIPLIER.wrapping_mul(self.seed)).wrapping_add(OFFSET)) % MAX;
self.seed
}
}
|
{
self.seed = seed;
}
|
identifier_body
|
prng.rs
|
use core::usize;
use rng::Rng;
// https://en.wikipedia.org/wiki/Linear_congruential_generator#Parameters_in_common_use
#[cfg(target_pointer_width = "32")]
pub const OFFSET: usize = 1013904223;
#[cfg(target_pointer_width = "32")]
pub const MULTIPLIER: usize = 1664525;
#[cfg(target_pointer_width = "64")]
pub const OFFSET: usize = 1442695040888963407;
|
pub const MAX: usize = usize::MAX as usize;
pub struct Prng {
seed: usize,
}
impl Prng {
#[inline]
pub fn new() -> Self {
Prng {
// derive a cheap, non-cryptographic initial seed from a memory address
seed: &false as *const _ as usize,
}
}
#[inline]
pub fn seed(&self) -> usize { self.seed }
#[inline]
pub fn set_seed(&mut self, seed: usize) {
self.seed = seed;
}
}
impl Rng for Prng {
// http://indiegamr.com/generate-repeatable-random-numbers-in-js/
#[inline]
fn next(&mut self) -> usize {
self.seed = ((MULTIPLIER.wrapping_mul(self.seed)).wrapping_add(OFFSET)) % MAX;
self.seed
}
}
|
#[cfg(target_pointer_width = "64")]
pub const MULTIPLIER: usize = 6364136223846793005;
|
random_line_split
|
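`Prng::next` above is a linear congruential step. A self-contained sketch of the same recurrence with the 64-bit constants, showing that a fixed seed reproduces the same sequence; the original additionally reduces the result modulo `usize::MAX`.

const MULTIPLIER: u64 = 6364136223846793005;
const OFFSET: u64 = 1442695040888963407;

// seed' = (MULTIPLIER * seed + OFFSET) mod 2^64
fn lcg_next(seed: u64) -> u64 {
    MULTIPLIER.wrapping_mul(seed).wrapping_add(OFFSET)
}

fn main() {
    let mut s = 42u64;
    for _ in 0..3 {
        s = lcg_next(s);
        println!("{}", s); // identical output on every run with seed 42
    }
}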
state.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use pod_state::PodState;
use trace;
use client::{EvmTestClient, EvmTestError, TransactResult};
use ethjson;
use transaction::SignedTransaction;
use vm::EnvInfo;
pub fn json_chain_test(json_data: &[u8]) -> Vec<String> {
::ethcore_logger::init_log();
let tests = ethjson::state::test::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, test) in tests.into_iter() {
{
let multitransaction = test.transaction;
let env: EnvInfo = test.env.into();
let pre: PodState = test.pre_state.into();
for (spec_name, states) in test.post_states {
let total = states.len();
let spec = match EvmTestClient::spec_from_json(&spec_name) {
Some(spec) => spec,
None => {
println!(" - {} | {:?} Ignoring tests because of missing spec", name, spec_name);
continue;
}
};
for (i, state) in states.into_iter().enumerate() {
let info = format!(" - {} | {:?} ({}/{})...", name, spec_name, i + 1, total);
let post_root: H256 = state.hash.into();
let transaction: SignedTransaction = multitransaction.select(&state.indexes).into();
let result = || -> Result<_, EvmTestError> {
Ok(EvmTestClient::from_pod_state(spec, pre.clone())?
.transact(&env, transaction, trace::NoopVMTracer))
};
match result() {
Err(err) => {
println!("{}!!! Unexpected internal error: {:?}", info, err);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Ok { state_root, .. }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { state_root, ref error }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
println!("{}!!! Execution error: {:?}", info, error);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { error, .. }) => {
flushln!("{} ok ({:?})", info, error);
},
Ok(_) => {
flushln!("{} ok", info);
},
}
}
}
}
}
if !failed.is_empty() {
println!("!!! {:?} tests failed.", failed.len());
}
failed
}
mod state_tests {
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String>
|
declare_test!{GeneralStateTest_stAttackTest, "GeneralStateTests/stAttackTest/"}
declare_test!{GeneralStateTest_stBadOpcodeTest, "GeneralStateTests/stBadOpcode/"}
declare_test!{GeneralStateTest_stCallCodes, "GeneralStateTests/stCallCodes/"}
declare_test!{GeneralStateTest_stCallDelegateCodesCallCodeHomestead, "GeneralStateTests/stCallDelegateCodesCallCodeHomestead/"}
declare_test!{GeneralStateTest_stCallDelegateCodesHomestead, "GeneralStateTests/stCallDelegateCodesHomestead/"}
declare_test!{GeneralStateTest_stChangedEIP150, "GeneralStateTests/stChangedEIP150/"}
declare_test!{GeneralStateTest_stCodeSizeLimit, "GeneralStateTests/stCodeSizeLimit/"}
declare_test!{GeneralStateTest_stCreateTest, "GeneralStateTests/stCreateTest/"}
declare_test!{GeneralStateTest_stDelegatecallTestHomestead, "GeneralStateTests/stDelegatecallTestHomestead/"}
declare_test!{GeneralStateTest_stEIP150singleCodeGasPrices, "GeneralStateTests/stEIP150singleCodeGasPrices/"}
declare_test!{GeneralStateTest_stEIP150Specific, "GeneralStateTests/stEIP150Specific/"}
declare_test!{GeneralStateTest_stEIP158Specific, "GeneralStateTests/stEIP158Specific/"}
declare_test!{GeneralStateTest_stExample, "GeneralStateTests/stExample/"}
declare_test!{GeneralStateTest_stHomesteadSpecific, "GeneralStateTests/stHomesteadSpecific/"}
declare_test!{GeneralStateTest_stInitCodeTest, "GeneralStateTests/stInitCodeTest/"}
declare_test!{GeneralStateTest_stLogTests, "GeneralStateTests/stLogTests/"}
declare_test!{GeneralStateTest_stMemExpandingEIP150Calls, "GeneralStateTests/stMemExpandingEIP150Calls/"}
declare_test!{heavy => GeneralStateTest_stMemoryStressTest, "GeneralStateTests/stMemoryStressTest/"}
declare_test!{GeneralStateTest_stMemoryTest, "GeneralStateTests/stMemoryTest/"}
declare_test!{GeneralStateTest_stNonZeroCallsTest, "GeneralStateTests/stNonZeroCallsTest/"}
declare_test!{GeneralStateTest_stPreCompiledContracts, "GeneralStateTests/stPreCompiledContracts/"}
declare_test!{heavy => GeneralStateTest_stQuadraticComplexityTest, "GeneralStateTests/stQuadraticComplexityTest/"}
declare_test!{GeneralStateTest_stRandom, "GeneralStateTests/stRandom/"}
declare_test!{GeneralStateTest_stRecursiveCreate, "GeneralStateTests/stRecursiveCreate/"}
declare_test!{GeneralStateTest_stRefundTest, "GeneralStateTests/stRefundTest/"}
declare_test!{GeneralStateTest_stReturnDataTest, "GeneralStateTests/stReturnDataTest/"}
declare_test!{GeneralStateTest_stRevertTest, "GeneralStateTests/stRevertTest/"}
declare_test!{GeneralStateTest_stSolidityTest, "GeneralStateTests/stSolidityTest/"}
declare_test!{GeneralStateTest_stSpecialTest, "GeneralStateTests/stSpecialTest/"}
declare_test!{GeneralStateTest_stStackTests, "GeneralStateTests/stStackTests/"}
declare_test!{GeneralStateTest_stStaticCall, "GeneralStateTests/stStaticCall/"}
declare_test!{GeneralStateTest_stSystemOperationsTest, "GeneralStateTests/stSystemOperationsTest/"}
declare_test!{GeneralStateTest_stTransactionTest, "GeneralStateTests/stTransactionTest/"}
declare_test!{GeneralStateTest_stTransitionTest, "GeneralStateTests/stTransitionTest/"}
declare_test!{GeneralStateTest_stWalletTest, "GeneralStateTests/stWalletTest/"}
declare_test!{GeneralStateTest_stZeroCallsRevert, "GeneralStateTests/stZeroCallsRevert/"}
declare_test!{GeneralStateTest_stZeroCallsTest, "GeneralStateTests/stZeroCallsTest/"}
declare_test!{GeneralStateTest_stZeroKnowledge, "GeneralStateTests/stZeroKnowledge/"}
}
|
{
json_chain_test(json_data)
}
|
identifier_body
|
state.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use pod_state::PodState;
use trace;
use client::{EvmTestClient, EvmTestError, TransactResult};
use ethjson;
use transaction::SignedTransaction;
use vm::EnvInfo;
pub fn json_chain_test(json_data: &[u8]) -> Vec<String> {
::ethcore_logger::init_log();
let tests = ethjson::state::test::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, test) in tests.into_iter() {
{
let multitransaction = test.transaction;
let env: EnvInfo = test.env.into();
let pre: PodState = test.pre_state.into();
for (spec_name, states) in test.post_states {
let total = states.len();
let spec = match EvmTestClient::spec_from_json(&spec_name) {
Some(spec) => spec,
None => {
println!(" - {} | {:?} Ignoring tests because of missing spec", name, spec_name);
continue;
}
};
for (i, state) in states.into_iter().enumerate() {
let info = format!(" - {} | {:?} ({}/{})...", name, spec_name, i + 1, total);
let post_root: H256 = state.hash.into();
|
let transaction: SignedTransaction = multitransaction.select(&state.indexes).into();
let result = || -> Result<_, EvmTestError> {
Ok(EvmTestClient::from_pod_state(spec, pre.clone())?
.transact(&env, transaction, trace::NoopVMTracer))
};
match result() {
Err(err) => {
println!("{}!!! Unexpected internal error: {:?}", info, err);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Ok { state_root, .. }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { state_root, ref error }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
println!("{}!!! Execution error: {:?}", info, error);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { error, .. }) => {
flushln!("{} ok ({:?})", info, error);
},
Ok(_) => {
flushln!("{} ok", info);
},
}
}
}
}
}
if !failed.is_empty() {
println!("!!! {:?} tests failed.", failed.len());
}
failed
}
mod state_tests {
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data)
}
declare_test!{GeneralStateTest_stAttackTest, "GeneralStateTests/stAttackTest/"}
declare_test!{GeneralStateTest_stBadOpcodeTest, "GeneralStateTests/stBadOpcode/"}
declare_test!{GeneralStateTest_stCallCodes, "GeneralStateTests/stCallCodes/"}
declare_test!{GeneralStateTest_stCallDelegateCodesCallCodeHomestead, "GeneralStateTests/stCallDelegateCodesCallCodeHomestead/"}
declare_test!{GeneralStateTest_stCallDelegateCodesHomestead, "GeneralStateTests/stCallDelegateCodesHomestead/"}
declare_test!{GeneralStateTest_stChangedEIP150, "GeneralStateTests/stChangedEIP150/"}
declare_test!{GeneralStateTest_stCodeSizeLimit, "GeneralStateTests/stCodeSizeLimit/"}
declare_test!{GeneralStateTest_stCreateTest, "GeneralStateTests/stCreateTest/"}
declare_test!{GeneralStateTest_stDelegatecallTestHomestead, "GeneralStateTests/stDelegatecallTestHomestead/"}
declare_test!{GeneralStateTest_stEIP150singleCodeGasPrices, "GeneralStateTests/stEIP150singleCodeGasPrices/"}
declare_test!{GeneralStateTest_stEIP150Specific, "GeneralStateTests/stEIP150Specific/"}
declare_test!{GeneralStateTest_stEIP158Specific, "GeneralStateTests/stEIP158Specific/"}
declare_test!{GeneralStateTest_stExample, "GeneralStateTests/stExample/"}
declare_test!{GeneralStateTest_stHomesteadSpecific, "GeneralStateTests/stHomesteadSpecific/"}
declare_test!{GeneralStateTest_stInitCodeTest, "GeneralStateTests/stInitCodeTest/"}
declare_test!{GeneralStateTest_stLogTests, "GeneralStateTests/stLogTests/"}
declare_test!{GeneralStateTest_stMemExpandingEIP150Calls, "GeneralStateTests/stMemExpandingEIP150Calls/"}
declare_test!{heavy => GeneralStateTest_stMemoryStressTest, "GeneralStateTests/stMemoryStressTest/"}
declare_test!{GeneralStateTest_stMemoryTest, "GeneralStateTests/stMemoryTest/"}
declare_test!{GeneralStateTest_stNonZeroCallsTest, "GeneralStateTests/stNonZeroCallsTest/"}
declare_test!{GeneralStateTest_stPreCompiledContracts, "GeneralStateTests/stPreCompiledContracts/"}
declare_test!{heavy => GeneralStateTest_stQuadraticComplexityTest, "GeneralStateTests/stQuadraticComplexityTest/"}
declare_test!{GeneralStateTest_stRandom, "GeneralStateTests/stRandom/"}
declare_test!{GeneralStateTest_stRecursiveCreate, "GeneralStateTests/stRecursiveCreate/"}
declare_test!{GeneralStateTest_stRefundTest, "GeneralStateTests/stRefundTest/"}
declare_test!{GeneralStateTest_stReturnDataTest, "GeneralStateTests/stReturnDataTest/"}
declare_test!{GeneralStateTest_stRevertTest, "GeneralStateTests/stRevertTest/"}
declare_test!{GeneralStateTest_stSolidityTest, "GeneralStateTests/stSolidityTest/"}
declare_test!{GeneralStateTest_stSpecialTest, "GeneralStateTests/stSpecialTest/"}
declare_test!{GeneralStateTest_stStackTests, "GeneralStateTests/stStackTests/"}
declare_test!{GeneralStateTest_stStaticCall, "GeneralStateTests/stStaticCall/"}
declare_test!{GeneralStateTest_stSystemOperationsTest, "GeneralStateTests/stSystemOperationsTest/"}
declare_test!{GeneralStateTest_stTransactionTest, "GeneralStateTests/stTransactionTest/"}
declare_test!{GeneralStateTest_stTransitionTest, "GeneralStateTests/stTransitionTest/"}
declare_test!{GeneralStateTest_stWalletTest, "GeneralStateTests/stWalletTest/"}
declare_test!{GeneralStateTest_stZeroCallsRevert, "GeneralStateTests/stZeroCallsRevert/"}
declare_test!{GeneralStateTest_stZeroCallsTest, "GeneralStateTests/stZeroCallsTest/"}
declare_test!{GeneralStateTest_stZeroKnowledge, "GeneralStateTests/stZeroKnowledge/"}
}
|
random_line_split
|
|
state.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use pod_state::PodState;
use trace;
use client::{EvmTestClient, EvmTestError, TransactResult};
use ethjson;
use transaction::SignedTransaction;
use vm::EnvInfo;
pub fn json_chain_test(json_data: &[u8]) -> Vec<String> {
::ethcore_logger::init_log();
let tests = ethjson::state::test::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, test) in tests.into_iter() {
{
let multitransaction = test.transaction;
let env: EnvInfo = test.env.into();
let pre: PodState = test.pre_state.into();
for (spec_name, states) in test.post_states {
let total = states.len();
let spec = match EvmTestClient::spec_from_json(&spec_name) {
Some(spec) => spec,
None => {
println!(" - {} | {:?} Ignoring tests because of missing spec", name, spec_name);
continue;
}
};
for (i, state) in states.into_iter().enumerate() {
let info = format!(" - {} | {:?} ({}/{})...", name, spec_name, i + 1, total);
let post_root: H256 = state.hash.into();
let transaction: SignedTransaction = multitransaction.select(&state.indexes).into();
let result = || -> Result<_, EvmTestError> {
Ok(EvmTestClient::from_pod_state(spec, pre.clone())?
.transact(&env, transaction, trace::NoopVMTracer))
};
match result() {
Err(err) => {
println!("{}!!! Unexpected internal error: {:?}", info, err);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Ok { state_root, .. }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { state_root, ref error }) if state_root != post_root => {
println!("{}!!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
println!("{}!!! Execution error: {:?}", info, error);
flushln!("{} fail", info);
failed.push(name.clone());
},
Ok(TransactResult::Err { error, .. }) => {
flushln!("{} ok ({:?})", info, error);
},
Ok(_) => {
flushln!("{} ok", info);
},
}
}
}
}
}
if !failed.is_empty() {
println!("!!! {:?} tests failed.", failed.len());
}
failed
}
mod state_tests {
use super::json_chain_test;
fn
|
(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data)
}
declare_test!{GeneralStateTest_stAttackTest, "GeneralStateTests/stAttackTest/"}
declare_test!{GeneralStateTest_stBadOpcodeTest, "GeneralStateTests/stBadOpcode/"}
declare_test!{GeneralStateTest_stCallCodes, "GeneralStateTests/stCallCodes/"}
declare_test!{GeneralStateTest_stCallDelegateCodesCallCodeHomestead, "GeneralStateTests/stCallDelegateCodesCallCodeHomestead/"}
declare_test!{GeneralStateTest_stCallDelegateCodesHomestead, "GeneralStateTests/stCallDelegateCodesHomestead/"}
declare_test!{GeneralStateTest_stChangedEIP150, "GeneralStateTests/stChangedEIP150/"}
declare_test!{GeneralStateTest_stCodeSizeLimit, "GeneralStateTests/stCodeSizeLimit/"}
declare_test!{GeneralStateTest_stCreateTest, "GeneralStateTests/stCreateTest/"}
declare_test!{GeneralStateTest_stDelegatecallTestHomestead, "GeneralStateTests/stDelegatecallTestHomestead/"}
declare_test!{GeneralStateTest_stEIP150singleCodeGasPrices, "GeneralStateTests/stEIP150singleCodeGasPrices/"}
declare_test!{GeneralStateTest_stEIP150Specific, "GeneralStateTests/stEIP150Specific/"}
declare_test!{GeneralStateTest_stEIP158Specific, "GeneralStateTests/stEIP158Specific/"}
declare_test!{GeneralStateTest_stExample, "GeneralStateTests/stExample/"}
declare_test!{GeneralStateTest_stHomesteadSpecific, "GeneralStateTests/stHomesteadSpecific/"}
declare_test!{GeneralStateTest_stInitCodeTest, "GeneralStateTests/stInitCodeTest/"}
declare_test!{GeneralStateTest_stLogTests, "GeneralStateTests/stLogTests/"}
declare_test!{GeneralStateTest_stMemExpandingEIP150Calls, "GeneralStateTests/stMemExpandingEIP150Calls/"}
declare_test!{heavy => GeneralStateTest_stMemoryStressTest, "GeneralStateTests/stMemoryStressTest/"}
declare_test!{GeneralStateTest_stMemoryTest, "GeneralStateTests/stMemoryTest/"}
declare_test!{GeneralStateTest_stNonZeroCallsTest, "GeneralStateTests/stNonZeroCallsTest/"}
declare_test!{GeneralStateTest_stPreCompiledContracts, "GeneralStateTests/stPreCompiledContracts/"}
declare_test!{heavy => GeneralStateTest_stQuadraticComplexityTest, "GeneralStateTests/stQuadraticComplexityTest/"}
declare_test!{GeneralStateTest_stRandom, "GeneralStateTests/stRandom/"}
declare_test!{GeneralStateTest_stRecursiveCreate, "GeneralStateTests/stRecursiveCreate/"}
declare_test!{GeneralStateTest_stRefundTest, "GeneralStateTests/stRefundTest/"}
declare_test!{GeneralStateTest_stReturnDataTest, "GeneralStateTests/stReturnDataTest/"}
declare_test!{GeneralStateTest_stRevertTest, "GeneralStateTests/stRevertTest/"}
declare_test!{GeneralStateTest_stSolidityTest, "GeneralStateTests/stSolidityTest/"}
declare_test!{GeneralStateTest_stSpecialTest, "GeneralStateTests/stSpecialTest/"}
declare_test!{GeneralStateTest_stStackTests, "GeneralStateTests/stStackTests/"}
declare_test!{GeneralStateTest_stStaticCall, "GeneralStateTests/stStaticCall/"}
declare_test!{GeneralStateTest_stSystemOperationsTest, "GeneralStateTests/stSystemOperationsTest/"}
declare_test!{GeneralStateTest_stTransactionTest, "GeneralStateTests/stTransactionTest/"}
declare_test!{GeneralStateTest_stTransitionTest, "GeneralStateTests/stTransitionTest/"}
declare_test!{GeneralStateTest_stWalletTest, "GeneralStateTests/stWalletTest/"}
declare_test!{GeneralStateTest_stZeroCallsRevert, "GeneralStateTests/stZeroCallsRevert/"}
declare_test!{GeneralStateTest_stZeroCallsTest, "GeneralStateTests/stZeroCallsTest/"}
declare_test!{GeneralStateTest_stZeroKnowledge, "GeneralStateTests/stZeroKnowledge/"}
}
|
do_json_test
|
identifier_name
|
lib.rs
|
// Copyright 2016 Peter Beard
// Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
//
// Various functions that seem to get used a lot
#![feature(test)]
extern crate test;
use std::ops;
pub mod bigint;
/// Find the sum of a Vec
pub fn sum<T: Copy + ops::Add<T, Output=T>>(v: &[T], initial: T) -> T {
v.iter().fold(initial, |acc, &x| acc + x)
}
/// Determine whether a number is a triangular number
pub fn is_triangular(p: u64) -> bool {
let n = (-1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 2.0;
n == n.floor() && n > 0.0
}
/// Determine whether a number is a pentagonal number
pub fn
|
(p: u64) -> bool {
let n = (1.0 + (1.0 + 24.0*(p as f64)).sqrt()) / 6.0;
n == n.floor()
}
/// Determine whether a number is a hexagonal number
pub fn is_hexagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 4.0;
n == n.floor()
}
/// Determine whether a number is prime
pub fn is_prime(n: i64) -> bool {
// Negative numbers, zero, and one are not prime
if n < 2 {
return false;
}
// We don't need to look at factors > sqrt(n)
let max_factor = ((n as f64).sqrt() as i64) + 1;
for f in (2..max_factor).rev() {
if n % f == 0 {
return false;
}
}
true
}
/// Calculate the sum of all proper divisors of n
pub fn sum_of_divisors(n: u32) -> u32 {
if n < 2 {
return n
}
let max_factor = (n as f32).sqrt() as u32 + 1;
let mut sum = 1;
for i in 2..max_factor {
if n % i == 0 {
sum += i;
            if i != n / i {
sum += n / i;
}
}
}
sum
}
/// Split an integer into its digits
pub fn get_digits(n: u32) -> Vec<u32> {
let mut mut_n = n;
// 2^32 is only 10 digits
let mut digits: Vec<u32> = Vec::with_capacity(10);
while mut_n >= 1 {
digits.push(mut_n % 10);
mut_n /= 10;
}
digits.reverse();
digits
}
/// Count the number of digits in a number
pub fn count_digits(n: u32) -> u32 {
// This is about 10x faster than calculating log10(n)
let mut count = 0;
let mut step = 1;
while step <= n {
count += 1;
step *= 10;
}
count
}
/// Use the sieve of Eratosthenes to mark numbers as prime or composite in [0, n)
pub fn prime_sieve(n: u32) -> Vec<bool> {
let mut primes: Vec<bool> = vec![true; n as usize];
primes[0] = false;
primes[1] = false;
for p in 2..n {
if primes[p as usize] {
            // If p is prime, all multiples of p are composite
let mut q = 2*p;
while q < n {
primes[q as usize] = false;
q += p;
}
}
}
primes
}
/// Get a list of all the prime numbers in [0, n)
pub fn primes_upto(n: u32) -> Vec<u32> {
let is_prime = prime_sieve(n+1);
// Use a linear approximation of Pi(n) to estimate the number of primes < n
let mut primes: Vec<u32> = Vec::with_capacity(((0.2f32 * (n as f32)) + 2.6f32) as usize);
for p in 2..n {
if is_prime[p as usize] {
primes.push(p);
}
}
primes
}
/// Determine whether a number is l..k pandigital
pub fn is_pandigital(n: u32, l: u32, k: u32) -> bool {
if l == k || k < l {
return false;
}
let mut digits: Vec<bool> = vec![false; (k + 1) as usize];
let mut value = 1;
for _ in l..(k + 1) {
let d = (n % (10*value))/value;
if d > k {
return false;
}
digits[d as usize] = true;
value *= 10;
}
for i in l..(k + 1) {
        if !digits[i as usize] {
return false;
}
}
for i in 0..l {
if digits[i as usize] {
return false;
}
}
true
}
#[cfg(test)]
mod tests {
use super::*;
use test::{Bencher, black_box};
#[test]
fn test_sum() {
assert_eq!(15u8, sum::<u8>(&vec![1,2,3,4,5], 0));
assert_eq!(15u16, sum::<u16>(&vec![1,2,3,4,5], 0));
assert_eq!(15u32, sum::<u32>(&vec![1,2,3,4,5], 0));
assert_eq!(15u64, sum::<u64>(&vec![1,2,3,4,5], 0));
assert_eq!(15i8, sum::<i8>(&vec![1,2,3,4,5], 0));
assert_eq!(15i16, sum::<i16>(&vec![1,2,3,4,5], 0));
assert_eq!(15i32, sum::<i32>(&vec![1,2,3,4,5], 0));
assert_eq!(15i64, sum::<i64>(&vec![1,2,3,4,5], 0));
assert_eq!(15f32, sum::<f32>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
assert_eq!(15f64, sum::<f64>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
}
#[test]
fn is_2_prime() {
assert!(is_prime(2));
}
#[test]
fn is_13_prime() {
assert!(is_prime(13));
}
#[test]
fn is_1_prime() {
assert_eq!(false, is_prime(1));
}
#[test]
fn is_0_prime() {
assert_eq!(false, is_prime(0));
}
#[test]
fn sum_of_divisors_0() {
assert_eq!(0, sum_of_divisors(0));
}
#[test]
fn sum_of_divisors_10() {
assert_eq!(8, sum_of_divisors(10));
}
#[test]
fn sum_of_divisors_28() {
assert_eq!(28, sum_of_divisors(28));
}
#[test]
fn test_get_digits() {
let n = 12345;
let digits = vec![1,2,3,4,5];
assert_eq!(digits, get_digits(n));
}
#[test]
fn test_count_digits() {
assert_eq!(2, count_digits(10));
assert_eq!(5, count_digits(12345));
}
#[test]
fn test_prime_sieve() {
let primes = vec![false, false, true, true, false, true, false, true, false, false];
assert_eq!(primes, prime_sieve(10));
}
#[test]
fn test_primes_upto() {
let primes = vec![2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert_eq!(primes, primes_upto(32));
}
#[test]
fn test_is_pandigital() {
assert_eq!(false, is_pandigital(12345678, 1, 9));
assert_eq!(false, is_pandigital(1234567890, 1, 9));
assert!(is_pandigital(1234, 1, 4));
assert!(is_pandigital(3456, 3, 6));
assert_eq!(false, is_pandigital(1234, 1, 1));
assert_eq!(false, is_pandigital(1234, 4, 1));
}
#[test]
fn test_is_triangular() {
assert!(is_triangular(1));
assert!(is_triangular(10));
assert_eq!(false, is_triangular(11));
assert_eq!(false, is_triangular(0));
}
#[test]
fn test_is_pentagonal() {
assert!(is_pentagonal(1));
assert!(is_pentagonal(12));
assert_eq!(false, is_pentagonal(13));
assert_eq!(false, is_pentagonal(0));
}
#[test]
fn test_is_hexagonal() {
assert!(is_hexagonal(1));
assert!(is_hexagonal(15));
assert_eq!(false, is_hexagonal(16));
assert_eq!(false, is_hexagonal(0));
}
#[bench]
fn bench_is_prime_composite(b: &mut Bencher) {
let n = black_box(12346);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_is_prime_prime(b: &mut Bencher) {
let n = black_box(12347);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_get_digits(b: &mut Bencher) {
b.iter(|| get_digits(12345));
}
#[bench]
fn bench_count_digits(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| count_digits(n));
}
#[bench]
fn bench_prime_sieve(b: &mut Bencher) {
b.iter(|| prime_sieve(12345));
}
#[bench]
fn bench_primes_upto(b: &mut Bencher) {
b.iter(|| primes_upto(12345));
}
#[bench]
fn bench_is_pandigital(b: &mut Bencher) {
b.iter(|| is_pandigital(12345, 1, 5));
}
#[bench]
fn bench_is_pentagonal(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| is_pentagonal(n));
}
}
|
is_pentagonal
|
identifier_name
|
lib.rs
|
// Copyright 2016 Peter Beard
// Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
//
// Various functions that seem to get used a lot
#![feature(test)]
extern crate test;
use std::ops;
pub mod bigint;
/// Find the sum of a Vec
pub fn sum<T: Copy + ops::Add<T, Output=T>>(v: &[T], initial: T) -> T {
v.iter().fold(initial, |acc, &x| acc + x)
}
|
/// Determine whether a number is a pentagonal number
pub fn is_pentagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 24.0*(p as f64)).sqrt()) / 6.0;
n == n.floor()
}
/// Determine whether a number is a hexagonal number
pub fn is_hexagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 4.0;
n == n.floor()
}
/// Determine whether a number is prime
pub fn is_prime(n: i64) -> bool {
// Negative numbers, zero, and one are not prime
if n < 2 {
return false;
}
// We don't need to look at factors > sqrt(n)
let max_factor = ((n as f64).sqrt() as i64) + 1;
for f in (2..max_factor).rev() {
if n % f == 0 {
return false;
}
}
true
}
/// Calculate the sum of all proper divisors of n
pub fn sum_of_divisors(n: u32) -> u32 {
if n < 2 {
return n
}
let max_factor = (n as f32).sqrt() as u32 + 1;
let mut sum = 1;
for i in 2..max_factor {
if n % i == 0 {
sum += i;
            if i != n / i {
sum += n / i;
}
}
}
sum
}
/// Split an integer into its digits
pub fn get_digits(n: u32) -> Vec<u32> {
let mut mut_n = n;
// 2^32 is only 10 digits
let mut digits: Vec<u32> = Vec::with_capacity(10);
while mut_n >= 1 {
digits.push(mut_n % 10);
mut_n /= 10;
}
digits.reverse();
digits
}
/// Count the number of digits in a number
pub fn count_digits(n: u32) -> u32 {
// This is about 10x faster than calculating log10(n)
let mut count = 0;
let mut step = 1;
while step <= n {
count += 1;
step *= 10;
}
count
}
/// Use the sieve of Eratosthenes to mark numbers as prime or composite in [0, n)
pub fn prime_sieve(n: u32) -> Vec<bool> {
let mut primes: Vec<bool> = vec![true; n as usize];
primes[0] = false;
primes[1] = false;
for p in 2..n {
if primes[p as usize] {
            // If p is prime, all multiples of p are composite
let mut q = 2*p;
while q < n {
primes[q as usize] = false;
q += p;
}
}
}
primes
}
/// Get a list of all the prime numbers in [0, n)
pub fn primes_upto(n: u32) -> Vec<u32> {
let is_prime = prime_sieve(n+1);
// Use a linear approximation of Pi(n) to estimate the number of primes < n
let mut primes: Vec<u32> = Vec::with_capacity(((0.2f32 * (n as f32)) + 2.6f32) as usize);
for p in 2..n {
if is_prime[p as usize] {
primes.push(p);
}
}
primes
}
/// Determine whether a number is l..k pandigital
pub fn is_pandigital(n: u32, l: u32, k: u32) -> bool {
if l == k || k < l {
return false;
}
let mut digits: Vec<bool> = vec![false; (k + 1) as usize];
let mut value = 1;
for _ in l..(k + 1) {
let d = (n % (10*value))/value;
if d > k {
return false;
}
digits[d as usize] = true;
value *= 10;
}
for i in l..(k + 1) {
        if !digits[i as usize] {
return false;
}
}
for i in 0..l {
if digits[i as usize] {
return false;
}
}
true
}
#[cfg(test)]
mod tests {
use super::*;
use test::{Bencher, black_box};
#[test]
fn test_sum() {
assert_eq!(15u8, sum::<u8>(&vec![1,2,3,4,5], 0));
assert_eq!(15u16, sum::<u16>(&vec![1,2,3,4,5], 0));
assert_eq!(15u32, sum::<u32>(&vec![1,2,3,4,5], 0));
assert_eq!(15u64, sum::<u64>(&vec![1,2,3,4,5], 0));
assert_eq!(15i8, sum::<i8>(&vec![1,2,3,4,5], 0));
assert_eq!(15i16, sum::<i16>(&vec![1,2,3,4,5], 0));
assert_eq!(15i32, sum::<i32>(&vec![1,2,3,4,5], 0));
assert_eq!(15i64, sum::<i64>(&vec![1,2,3,4,5], 0));
assert_eq!(15f32, sum::<f32>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
assert_eq!(15f64, sum::<f64>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
}
#[test]
fn is_2_prime() {
assert!(is_prime(2));
}
#[test]
fn is_13_prime() {
assert!(is_prime(13));
}
#[test]
fn is_1_prime() {
assert_eq!(false, is_prime(1));
}
#[test]
fn is_0_prime() {
assert_eq!(false, is_prime(0));
}
#[test]
fn sum_of_divisors_0() {
assert_eq!(0, sum_of_divisors(0));
}
#[test]
fn sum_of_divisors_10() {
assert_eq!(8, sum_of_divisors(10));
}
#[test]
fn sum_of_divisors_28() {
assert_eq!(28, sum_of_divisors(28));
}
#[test]
fn test_get_digits() {
let n = 12345;
let digits = vec![1,2,3,4,5];
assert_eq!(digits, get_digits(n));
}
#[test]
fn test_count_digits() {
assert_eq!(2, count_digits(10));
assert_eq!(5, count_digits(12345));
}
#[test]
fn test_prime_sieve() {
let primes = vec![false, false, true, true, false, true, false, true, false, false];
assert_eq!(primes, prime_sieve(10));
}
#[test]
fn test_primes_upto() {
let primes = vec![2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert_eq!(primes, primes_upto(32));
}
#[test]
fn test_is_pandigital() {
assert_eq!(false, is_pandigital(12345678, 1, 9));
assert_eq!(false, is_pandigital(1234567890, 1, 9));
assert!(is_pandigital(1234, 1, 4));
assert!(is_pandigital(3456, 3, 6));
assert_eq!(false, is_pandigital(1234, 1, 1));
assert_eq!(false, is_pandigital(1234, 4, 1));
}
#[test]
fn test_is_triangular() {
assert!(is_triangular(1));
assert!(is_triangular(10));
assert_eq!(false, is_triangular(11));
assert_eq!(false, is_triangular(0));
}
#[test]
fn test_is_pentagonal() {
assert!(is_pentagonal(1));
assert!(is_pentagonal(12));
assert_eq!(false, is_pentagonal(13));
assert_eq!(false, is_pentagonal(0));
}
#[test]
fn test_is_hexagonal() {
assert!(is_hexagonal(1));
assert!(is_hexagonal(15));
assert_eq!(false, is_hexagonal(16));
assert_eq!(false, is_hexagonal(0));
}
#[bench]
fn bench_is_prime_composite(b: &mut Bencher) {
let n = black_box(12346);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_is_prime_prime(b: &mut Bencher) {
let n = black_box(12347);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_get_digits(b: &mut Bencher) {
b.iter(|| get_digits(12345));
}
#[bench]
fn bench_count_digits(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| count_digits(n));
}
#[bench]
fn bench_prime_sieve(b: &mut Bencher) {
b.iter(|| prime_sieve(12345));
}
#[bench]
fn bench_primes_upto(b: &mut Bencher) {
b.iter(|| primes_upto(12345));
}
#[bench]
fn bench_is_pandigital(b: &mut Bencher) {
b.iter(|| is_pandigital(12345, 1, 5));
}
#[bench]
fn bench_is_pentagonal(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| is_pentagonal(n));
}
}
|
/// Determine whether a number is a triangular number
pub fn is_triangular(p: u64) -> bool {
let n = (-1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 2.0;
n == n.floor() && n > 0.0
}
|
random_line_split
|
lib.rs
|
// Copyright 2016 Peter Beard
// Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
//
// Various functions that seem to get used a lot
#![feature(test)]
extern crate test;
use std::ops;
pub mod bigint;
/// Find the sum of a Vec
pub fn sum<T: Copy + ops::Add<T, Output=T>>(v: &[T], initial: T) -> T {
v.iter().fold(initial, |acc, &x| acc + x)
}
/// Determine whether a number is a triangular number
pub fn is_triangular(p: u64) -> bool
|
/// Determine whether a number is a pentagonal number
pub fn is_pentagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 24.0*(p as f64)).sqrt()) / 6.0;
n == n.floor()
}
/// Determine whether a number is a hexagonal number
pub fn is_hexagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 4.0;
n == n.floor()
}
/// Determine whether a number is prime
pub fn is_prime(n: i64) -> bool {
// Negative numbers, zero, and one are not prime
if n < 2 {
return false;
}
// We don't need to look at factors > sqrt(n)
let max_factor = ((n as f64).sqrt() as i64) + 1;
for f in (2..max_factor).rev() {
if n % f == 0 {
return false;
}
}
true
}
/// Calculate the sum of all proper divisors of n
pub fn sum_of_divisors(n: u32) -> u32 {
if n < 2 {
return n
}
let max_factor = (n as f32).sqrt() as u32 + 1;
let mut sum = 1;
for i in 2..max_factor {
if n % i == 0 {
sum += i;
            if i != n / i {
sum += n / i;
}
}
}
sum
}
/// Split an integer into its digits
pub fn get_digits(n: u32) -> Vec<u32> {
let mut mut_n = n;
// 2^32 is only 10 digits
let mut digits: Vec<u32> = Vec::with_capacity(10);
while mut_n >= 1 {
digits.push(mut_n % 10);
mut_n /= 10;
}
digits.reverse();
digits
}
/// Count the number of digits in a number
pub fn count_digits(n: u32) -> u32 {
// This is about 10x faster than calculating log10(n)
let mut count = 0;
let mut step = 1;
while step <= n {
count += 1;
step *= 10;
}
count
}
/// Use the sieve of Eratosthenes to mark numbers as prime or composite in [0, n)
pub fn prime_sieve(n: u32) -> Vec<bool> {
let mut primes: Vec<bool> = vec![true; n as usize];
primes[0] = false;
primes[1] = false;
for p in 2..n {
if primes[p as usize] {
            // If p is prime, all multiples of p are composite
let mut q = 2*p;
while q < n {
primes[q as usize] = false;
q += p;
}
}
}
primes
}
/// Get a list of all the prime numbers in [0, n)
pub fn primes_upto(n: u32) -> Vec<u32> {
let is_prime = prime_sieve(n+1);
// Use a linear approximation of Pi(n) to estimate the number of primes < n
let mut primes: Vec<u32> = Vec::with_capacity(((0.2f32 * (n as f32)) + 2.6f32) as usize);
for p in 2..n {
if is_prime[p as usize] {
primes.push(p);
}
}
primes
}
/// Determine whether a number is l..k pandigital
pub fn is_pandigital(n: u32, l: u32, k: u32) -> bool {
if l == k || k < l {
return false;
}
let mut digits: Vec<bool> = vec![false; (k + 1) as usize];
let mut value = 1;
for _ in l..(k + 1) {
let d = (n % (10*value))/value;
if d > k {
return false;
}
digits[d as usize] = true;
value *= 10;
}
for i in l..(k + 1) {
        if !digits[i as usize] {
return false;
}
}
for i in 0..l {
if digits[i as usize] {
return false;
}
}
true
}
#[cfg(test)]
mod tests {
use super::*;
use test::{Bencher, black_box};
#[test]
fn test_sum() {
assert_eq!(15u8, sum::<u8>(&vec![1,2,3,4,5], 0));
assert_eq!(15u16, sum::<u16>(&vec![1,2,3,4,5], 0));
assert_eq!(15u32, sum::<u32>(&vec![1,2,3,4,5], 0));
assert_eq!(15u64, sum::<u64>(&vec![1,2,3,4,5], 0));
assert_eq!(15i8, sum::<i8>(&vec![1,2,3,4,5], 0));
assert_eq!(15i16, sum::<i16>(&vec![1,2,3,4,5], 0));
assert_eq!(15i32, sum::<i32>(&vec![1,2,3,4,5], 0));
assert_eq!(15i64, sum::<i64>(&vec![1,2,3,4,5], 0));
assert_eq!(15f32, sum::<f32>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
assert_eq!(15f64, sum::<f64>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
}
#[test]
fn is_2_prime() {
assert!(is_prime(2));
}
#[test]
fn is_13_prime() {
assert!(is_prime(13));
}
#[test]
fn is_1_prime() {
assert_eq!(false, is_prime(1));
}
#[test]
fn is_0_prime() {
assert_eq!(false, is_prime(0));
}
#[test]
fn sum_of_divisors_0() {
assert_eq!(0, sum_of_divisors(0));
}
#[test]
fn sum_of_divisors_10() {
assert_eq!(8, sum_of_divisors(10));
}
#[test]
fn sum_of_divisors_28() {
assert_eq!(28, sum_of_divisors(28));
}
#[test]
fn test_get_digits() {
let n = 12345;
let digits = vec![1,2,3,4,5];
assert_eq!(digits, get_digits(n));
}
#[test]
fn test_count_digits() {
assert_eq!(2, count_digits(10));
assert_eq!(5, count_digits(12345));
}
#[test]
fn test_prime_sieve() {
let primes = vec![false, false, true, true, false, true, false, true, false, false];
assert_eq!(primes, prime_sieve(10));
}
#[test]
fn test_primes_upto() {
let primes = vec![2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert_eq!(primes, primes_upto(32));
}
#[test]
fn test_is_pandigital() {
assert_eq!(false, is_pandigital(12345678, 1, 9));
assert_eq!(false, is_pandigital(1234567890, 1, 9));
assert!(is_pandigital(1234, 1, 4));
assert!(is_pandigital(3456, 3, 6));
assert_eq!(false, is_pandigital(1234, 1, 1));
assert_eq!(false, is_pandigital(1234, 4, 1));
}
#[test]
fn test_is_triangular() {
assert!(is_triangular(1));
assert!(is_triangular(10));
assert_eq!(false, is_triangular(11));
assert_eq!(false, is_triangular(0));
}
#[test]
fn test_is_pentagonal() {
assert!(is_pentagonal(1));
assert!(is_pentagonal(12));
assert_eq!(false, is_pentagonal(13));
assert_eq!(false, is_pentagonal(0));
}
#[test]
fn test_is_hexagonal() {
assert!(is_hexagonal(1));
assert!(is_hexagonal(15));
assert_eq!(false, is_hexagonal(16));
assert_eq!(false, is_hexagonal(0));
}
#[bench]
fn bench_is_prime_composite(b: &mut Bencher) {
let n = black_box(12346);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_is_prime_prime(b: &mut Bencher) {
let n = black_box(12347);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_get_digits(b: &mut Bencher) {
b.iter(|| get_digits(12345));
}
#[bench]
fn bench_count_digits(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| count_digits(n));
}
#[bench]
fn bench_prime_sieve(b: &mut Bencher) {
b.iter(|| prime_sieve(12345));
}
#[bench]
fn bench_primes_upto(b: &mut Bencher) {
b.iter(|| primes_upto(12345));
}
#[bench]
fn bench_is_pandigital(b: &mut Bencher) {
b.iter(|| is_pandigital(12345, 1, 5));
}
#[bench]
fn bench_is_pentagonal(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| is_pentagonal(n));
}
}
|
{
let n = (-1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 2.0;
n == n.floor() && n > 0.0
}
|
identifier_body
|
lib.rs
|
// Copyright 2016 Peter Beard
// Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
//
// Various functions that seem to get used a lot
#![feature(test)]
extern crate test;
use std::ops;
pub mod bigint;
/// Find the sum of a Vec
pub fn sum<T: Copy + ops::Add<T, Output=T>>(v: &[T], initial: T) -> T {
v.iter().fold(initial, |acc, &x| acc + x)
}
/// Determine whether a number is a triangular number
pub fn is_triangular(p: u64) -> bool {
let n = (-1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 2.0;
n == n.floor() && n > 0.0
}
/// Determine whether a number is a pentagonal number
pub fn is_pentagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 24.0*(p as f64)).sqrt()) / 6.0;
n == n.floor()
}
/// Determine whether a number is a hexagonal number
pub fn is_hexagonal(p: u64) -> bool {
let n = (1.0 + (1.0 + 8.0*(p as f64)).sqrt()) / 4.0;
n == n.floor()
}
/// Determine whether a number is prime
pub fn is_prime(n: i64) -> bool {
// Negative numbers, zero, and one are not prime
if n < 2 {
return false;
}
// We don't need to look at factors > sqrt(n)
let max_factor = ((n as f64).sqrt() as i64) + 1;
for f in (2..max_factor).rev() {
if n % f == 0 {
return false;
}
}
true
}
/// Calculate the sum of all proper divisors of n
pub fn sum_of_divisors(n: u32) -> u32 {
if n < 2 {
return n
}
let max_factor = (n as f32).sqrt() as u32 + 1;
let mut sum = 1;
for i in 2..max_factor {
if n % i == 0 {
sum += i;
            if i != n / i {
sum += n / i;
}
}
}
sum
}
/// Split an integer into its digits
pub fn get_digits(n: u32) -> Vec<u32> {
let mut mut_n = n;
// 2^32 is only 10 digits
let mut digits: Vec<u32> = Vec::with_capacity(10);
while mut_n >= 1 {
digits.push(mut_n % 10);
mut_n /= 10;
}
digits.reverse();
digits
}
/// Count the number of digits in a number
pub fn count_digits(n: u32) -> u32 {
// This is about 10x faster than calculating log10(n)
let mut count = 0;
let mut step = 1;
while step <= n {
count += 1;
step *= 10;
}
count
}
/// Use the sieve of Eratosthenes to mark numbers as prime or composite in [0, n)
pub fn prime_sieve(n: u32) -> Vec<bool> {
let mut primes: Vec<bool> = vec![true; n as usize];
primes[0] = false;
primes[1] = false;
for p in 2..n {
if primes[p as usize] {
            // If p is prime, all multiples of p are composite
let mut q = 2*p;
while q < n {
primes[q as usize] = false;
q += p;
}
}
}
primes
}
/// Get a list of all the prime numbers in [0, n)
pub fn primes_upto(n: u32) -> Vec<u32> {
let is_prime = prime_sieve(n+1);
// Use a linear approximation of Pi(n) to estimate the number of primes < n
let mut primes: Vec<u32> = Vec::with_capacity(((0.2f32 * (n as f32)) + 2.6f32) as usize);
for p in 2..n {
if is_prime[p as usize] {
primes.push(p);
}
}
primes
}
/// Determine whether a number is l..k pandigital
pub fn is_pandigital(n: u32, l: u32, k: u32) -> bool {
if l == k || k < l {
return false;
}
let mut digits: Vec<bool> = vec![false; (k + 1) as usize];
let mut value = 1;
for _ in l..(k + 1) {
let d = (n % (10*value))/value;
if d > k
|
digits[d as usize] = true;
value *= 10;
}
for i in l..(k + 1) {
        if !digits[i as usize] {
return false;
}
}
for i in 0..l {
if digits[i as usize] {
return false;
}
}
true
}
#[cfg(test)]
mod tests {
use super::*;
use test::{Bencher, black_box};
#[test]
fn test_sum() {
assert_eq!(15u8, sum::<u8>(&vec![1,2,3,4,5], 0));
assert_eq!(15u16, sum::<u16>(&vec![1,2,3,4,5], 0));
assert_eq!(15u32, sum::<u32>(&vec![1,2,3,4,5], 0));
assert_eq!(15u64, sum::<u64>(&vec![1,2,3,4,5], 0));
assert_eq!(15i8, sum::<i8>(&vec![1,2,3,4,5], 0));
assert_eq!(15i16, sum::<i16>(&vec![1,2,3,4,5], 0));
assert_eq!(15i32, sum::<i32>(&vec![1,2,3,4,5], 0));
assert_eq!(15i64, sum::<i64>(&vec![1,2,3,4,5], 0));
assert_eq!(15f32, sum::<f32>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
assert_eq!(15f64, sum::<f64>(&vec![1.0,2.0,3.0,4.0,5.0], 0.0));
}
#[test]
fn is_2_prime() {
assert!(is_prime(2));
}
#[test]
fn is_13_prime() {
assert!(is_prime(13));
}
#[test]
fn is_1_prime() {
assert_eq!(false, is_prime(1));
}
#[test]
fn is_0_prime() {
assert_eq!(false, is_prime(0));
}
#[test]
fn sum_of_divisors_0() {
assert_eq!(0, sum_of_divisors(0));
}
#[test]
fn sum_of_divisors_10() {
assert_eq!(8, sum_of_divisors(10));
}
#[test]
fn sum_of_divisors_28() {
assert_eq!(28, sum_of_divisors(28));
}
#[test]
fn test_get_digits() {
let n = 12345;
let digits = vec![1,2,3,4,5];
assert_eq!(digits, get_digits(n));
}
#[test]
fn test_count_digits() {
assert_eq!(2, count_digits(10));
assert_eq!(5, count_digits(12345));
}
#[test]
fn test_prime_sieve() {
let primes = vec![false, false, true, true, false, true, false, true, false, false];
assert_eq!(primes, prime_sieve(10));
}
#[test]
fn test_primes_upto() {
let primes = vec![2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31];
assert_eq!(primes, primes_upto(32));
}
#[test]
fn test_is_pandigital() {
assert_eq!(false, is_pandigital(12345678, 1, 9));
assert_eq!(false, is_pandigital(1234567890, 1, 9));
assert!(is_pandigital(1234, 1, 4));
assert!(is_pandigital(3456, 3, 6));
assert_eq!(false, is_pandigital(1234, 1, 1));
assert_eq!(false, is_pandigital(1234, 4, 1));
}
#[test]
fn test_is_triangular() {
assert!(is_triangular(1));
assert!(is_triangular(10));
assert_eq!(false, is_triangular(11));
assert_eq!(false, is_triangular(0));
}
#[test]
fn test_is_pentagonal() {
assert!(is_pentagonal(1));
assert!(is_pentagonal(12));
assert_eq!(false, is_pentagonal(13));
assert_eq!(false, is_pentagonal(0));
}
#[test]
fn test_is_hexagonal() {
assert!(is_hexagonal(1));
assert!(is_hexagonal(15));
assert_eq!(false, is_hexagonal(16));
assert_eq!(false, is_hexagonal(0));
}
#[bench]
fn bench_is_prime_composite(b: &mut Bencher) {
let n = black_box(12346);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_is_prime_prime(b: &mut Bencher) {
let n = black_box(12347);
b.iter(|| is_prime(n));
}
#[bench]
fn bench_get_digits(b: &mut Bencher) {
b.iter(|| get_digits(12345));
}
#[bench]
fn bench_count_digits(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| count_digits(n));
}
#[bench]
fn bench_prime_sieve(b: &mut Bencher) {
b.iter(|| prime_sieve(12345));
}
#[bench]
fn bench_primes_upto(b: &mut Bencher) {
b.iter(|| primes_upto(12345));
}
#[bench]
fn bench_is_pandigital(b: &mut Bencher) {
b.iter(|| is_pandigital(12345, 1, 5));
}
#[bench]
fn bench_is_pentagonal(b: &mut Bencher) {
let n = black_box(12345);
b.iter(|| is_pentagonal(n));
}
}
|
{
return false;
}
|
conditional_block
|
resolution.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Resolution values:
//!
//! https://drafts.csswg.org/css-values/#resolution
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::specified;
/// A computed `<resolution>`.
pub struct
|
(CSSFloat);
impl Resolution {
/// Returns this resolution value as dppx.
#[inline]
pub fn dppx(&self) -> CSSFloat {
self.0
}
}
impl ToComputedValue for specified::Resolution {
type ComputedValue = Resolution;
#[inline]
fn to_computed_value(&self, _: &Context) -> Self::ComputedValue {
Resolution(self.to_dppx())
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
specified::Resolution::Dppx(computed.dppx())
}
}
impl ToCss for Resolution {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
self.dppx().to_css(dest)?;
dest.write_str("dppx")
}
}
|
Resolution
|
identifier_name
|
resolution.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Resolution values:
//!
//! https://drafts.csswg.org/css-values/#resolution
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::specified;
/// A computed `<resolution>`.
pub struct Resolution(CSSFloat);
impl Resolution {
/// Returns this resolution value as dppx.
#[inline]
pub fn dppx(&self) -> CSSFloat
|
}
impl ToComputedValue for specified::Resolution {
type ComputedValue = Resolution;
#[inline]
fn to_computed_value(&self, _: &Context) -> Self::ComputedValue {
Resolution(self.to_dppx())
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
specified::Resolution::Dppx(computed.dppx())
}
}
impl ToCss for Resolution {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
self.dppx().to_css(dest)?;
dest.write_str("dppx")
}
}
|
{
self.0
}
|
identifier_body
|
resolution.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Resolution values:
//!
//! https://drafts.csswg.org/css-values/#resolution
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::specified;
/// A computed `<resolution>`.
pub struct Resolution(CSSFloat);
impl Resolution {
/// Returns this resolution value as dppx.
#[inline]
pub fn dppx(&self) -> CSSFloat {
|
impl ToComputedValue for specified::Resolution {
type ComputedValue = Resolution;
#[inline]
fn to_computed_value(&self, _: &Context) -> Self::ComputedValue {
Resolution(self.to_dppx())
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
specified::Resolution::Dppx(computed.dppx())
}
}
impl ToCss for Resolution {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
self.dppx().to_css(dest)?;
dest.write_str("dppx")
}
}
|
self.0
}
}
|
random_line_split
|
fixi.rs
|
use {
fixity::{
fixity::Builder,
path::Path,
value::{Key, Value},
Config, Fixity,
},
std::path::PathBuf,
structopt::StructOpt,
};
#[derive(Debug, StructOpt)]
#[structopt(name = "fixi", about = "fixity content management")]
struct Opt {
#[structopt(flatten)]
fixi_opt: FixiOpt,
#[structopt(subcommand)]
subcmd: Command,
}
/// A temporary config setting up Fixi with the limited in-dev options
/// it has at the moment.
///
/// In the near future this will be revamped to support complex configuration,
/// which may or may not be managed by StructOpt.
#[derive(Debug, StructOpt)]
struct FixiOpt {
// #[structopt(long, env = "GLOBAL_FIXI_DIR")]
// pub global_fixi_dir: Option<PathBuf>,
#[structopt(long, env = "FIXI_DIR_NAME")]
pub fixi_dir_name: Option<PathBuf>,
#[structopt(long, env = "FIXI_BASE_PATH")]
pub base_path: Option<PathBuf>,
#[structopt(long, env = "FIXI_WORKSPACE", default_value = "default")]
pub workspace: String,
}
#[derive(Debug, StructOpt)]
enum Command {
Init,
/// A Map interface to Fixity data.
///
    /// Map is primarily a low-level interface, enabling insight and mutation on the raw
/// Key-Value format of Fixity.
Map {
/// The destination to write a `Value` to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: MapSubcmd,
},
/// A raw bytes interface to Fixity, allowing reading and writing of arbitrary bytes at the
    /// provided `Path`, deduplicated via content-defined chunking.
Bytes {
/// The destination to write bytes to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: BytesSubcmd,
},
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("error: {0}")]
User(String),
#[error("fixity error: {0}")]
Fixity(#[from] fixity::Error),
#[error("fixity storage error: {0}")]
StorageError(#[from] fixity::core::storage::Error),
}
#[tokio::main]
async fn main() -> Result<(), Error> {
env_logger::from_env(env_logger::Env::default().default_filter_or("error")).init();
let opt = Opt::from_args();
let fixi = {
let FixiOpt {
fixi_dir_name,
base_path,
workspace,
} = opt.fixi_opt;
let config = Config::builder()
.with_fixi_dir_name(fixi_dir_name)
.with_base_path(base_path)
.with_workspace_name(Some(workspace));
let builder = Fixity::builder().with_config(config.clone());
match opt.subcmd {
Command::Init => {
return cmd_init(builder).await;
},
_ => builder.open().await?,
}
};
match opt.subcmd {
Command::Init => unreachable!("matched above"),
Command::Map { path, subcmd } => cmd_map(fixi, path, subcmd).await,
Command::Bytes { path, subcmd } => cmd_bytes(fixi, path, subcmd).await,
}
}
async fn cmd_init(builder: Builder) -> Result<(), Error> {
let (_, config) = builder.init_config().await?;
println!(
"created Fixity repository at: {}",
config.fixi_dir_path.to_string_lossy()
);
Ok(())
}
#[derive(Debug, StructOpt)]
enum MapSubcmd {
Get {
/// The `Key` to get a `Value` from.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
},
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
/// The `Key` to write a `Value` or Bytes to.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
/// Write the [`Value`] to the given [`Key`].
#[structopt(
name = "VALUE", parse(try_from_str = Value::from_cli_str),
)]
value: Value,
},
Ls {
#[structopt(short = "s", long = "start", parse(try_from_str = Key::from_cli_str))]
start: Option<Key>,
#[structopt(short = "e", long = "end", parse(try_from_str = Key::from_cli_str))]
end: Option<Key>,
},
}
async fn cmd_map(fixi: Fixity, path: Path, subcmd: MapSubcmd) -> Result<(), Error> {
match subcmd {
MapSubcmd::Get { key } => cmd_map_get(fixi, path, key).await,
MapSubcmd::Put { commit, key, value } => cmd_map_put(fixi, path, commit, key, value).await,
MapSubcmd::Ls { start, end } => cmd_map_ls(fixi, path, start, end).await,
}
}
async fn cmd_map_get(fixi: Fixity, path: Path, key: Key) -> Result<(), Error> {
let map = fixi.map(path);
let v = map.get(key).await?;
println!("{:?}", v);
Ok(())
}
async fn cmd_map_put(
fixi: Fixity,
path: Path,
commit: bool,
key: Key,
value: Value,
) -> Result<(), Error> {
let mut map = fixi.map(path);
map.insert(key, value).await?;
if commit {
let addr = map.commit().await?;
println!("{:?}", addr);
}
Ok(())
}
async fn cmd_map_ls(
fixi: Fixity,
path: Path,
start: Option<Key>,
end: Option<Key>,
) -> Result<(), Error> {
let map = fixi.map(path);
let mut iter = match (start, end) {
(Some(start), Some(end)) => map.iter(start..end).await?,
(Some(start), None) => map.iter(start..).await?,
(None, Some(end)) => map.iter(..end).await?,
(None, None) => map.iter(..).await?,
};
while let Some(res) = iter.next() {
let (key, value) = res?;
println!("{}={}", key, value);
}
Ok(())
}
#[derive(Debug, StructOpt)]
enum BytesSubcmd {
Get,
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
},
}
async fn cmd_bytes(fixi: Fixity, path: Path, subcmd: BytesSubcmd) -> Result<(), Error> {
match subcmd {
BytesSubcmd::Get => cmd_bytes_get(fixi, path).await,
BytesSubcmd::Put { commit } => cmd_bytes_put(fixi, path, commit).await,
}
}
async fn
|
(fixi: Fixity, path: Path) -> Result<(), Error> {
let stdout = tokio::io::stdout();
let bytes = fixi.bytes(path)?;
let _ = bytes.read(stdout).await?;
Ok(())
}
async fn cmd_bytes_put(fixi: Fixity, path: Path, commit: bool) -> Result<(), Error> {
if path.len() == 0 {
return Err(Error::User(
"cannot get/put bytes to root of fixity repository".to_owned(),
));
}
// TODO: tokio docs recommend against this for interactive uses[1], so this
// should be fixed eventually - when interactivity is prioritized a bit more.
//
// Excerpt for context:
//
// > This handle is best used for non-interactive uses, such as when a file is piped
// > into the application. For technical reasons, stdin is implemented by using an ordinary
// > blocking read on a separate thread, and it is impossible to cancel that read.
// > This can make shutdown of the runtime hang until the user presses enter.
// >
// > For interactive uses, it is recommended to spawn a thread dedicated to user input and
// > use blocking IO directly in that thread.
//
// [1]: https://docs.rs/tokio/1.2.0/tokio/io/struct.Stdin.html
let stdin = tokio::io::stdin();
let bytes = fixi.bytes(path)?;
let _ = bytes.write(stdin).await?;
if commit {
let addr = bytes.commit().await?;
println!("{:?}", addr);
} else {
println!("bytes staged");
}
Ok(())
}
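// The TODO above cites the tokio guidance to put interactive stdin on its own
// blocking thread. The helper below is only an illustrative sketch of that
// pattern (it is not called anywhere in this CLI; its name and channel size
// are assumptions made for the example): lines are read with blocking IO on a
// dedicated thread and forwarded over a tokio mpsc channel, so shutting the
// runtime down is never stuck behind a pending read.
#[allow(dead_code)]
fn spawn_stdin_lines() -> tokio::sync::mpsc::Receiver<String> {
    let (tx, rx) = tokio::sync::mpsc::channel::<String>(16);
    std::thread::spawn(move || {
        let stdin = std::io::stdin();
        let mut line = String::new();
        // `read_line` returns Ok(0) at EOF; stop on EOF, a read error, or a
        // closed channel (the async side dropped its receiver).
        while stdin.read_line(&mut line).map(|n| n > 0).unwrap_or(false) {
            if tx.blocking_send(line.trim_end().to_owned()).is_err() {
                break;
            }
            line.clear();
        }
    });
    rx
}
// An async caller could then consume it with
// `while let Some(line) = rx.recv().await { /* handle user input */ }`.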
|
cmd_bytes_get
|
identifier_name
|
fixi.rs
|
use {
fixity::{
fixity::Builder,
path::Path,
value::{Key, Value},
Config, Fixity,
},
std::path::PathBuf,
structopt::StructOpt,
};
#[derive(Debug, StructOpt)]
#[structopt(name = "fixi", about = "fixity content management")]
struct Opt {
#[structopt(flatten)]
fixi_opt: FixiOpt,
#[structopt(subcommand)]
subcmd: Command,
}
/// A temporary config setting up Fixi with the limited in-dev options
/// it has at the moment.
///
/// In the near future this will be revamped to support complex configuration,
/// which may or may not be managed by StructOpt.
#[derive(Debug, StructOpt)]
struct FixiOpt {
// #[structopt(long, env = "GLOBAL_FIXI_DIR")]
// pub global_fixi_dir: Option<PathBuf>,
#[structopt(long, env = "FIXI_DIR_NAME")]
pub fixi_dir_name: Option<PathBuf>,
#[structopt(long, env = "FIXI_BASE_PATH")]
pub base_path: Option<PathBuf>,
#[structopt(long, env = "FIXI_WORKSPACE", default_value = "default")]
pub workspace: String,
}
#[derive(Debug, StructOpt)]
enum Command {
Init,
/// A Map interface to Fixity data.
///
    /// Map is primarily a low-level interface, enabling insight and mutation on the raw
/// Key-Value format of Fixity.
Map {
/// The destination to write a `Value` to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: MapSubcmd,
},
/// A raw bytes interface to Fixity, allowing reading and writing of arbitrary bytes at the
    /// provided `Path`, deduplicated via content-defined chunking.
Bytes {
/// The destination to write bytes to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: BytesSubcmd,
},
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("error: {0}")]
User(String),
#[error("fixity error: {0}")]
Fixity(#[from] fixity::Error),
#[error("fixity storage error: {0}")]
StorageError(#[from] fixity::core::storage::Error),
}
#[tokio::main]
async fn main() -> Result<(), Error> {
env_logger::from_env(env_logger::Env::default().default_filter_or("error")).init();
let opt = Opt::from_args();
let fixi = {
let FixiOpt {
fixi_dir_name,
base_path,
workspace,
} = opt.fixi_opt;
let config = Config::builder()
.with_fixi_dir_name(fixi_dir_name)
.with_base_path(base_path)
.with_workspace_name(Some(workspace));
let builder = Fixity::builder().with_config(config.clone());
match opt.subcmd {
Command::Init => {
return cmd_init(builder).await;
},
_ => builder.open().await?,
}
};
match opt.subcmd {
Command::Init => unreachable!("matched above"),
Command::Map { path, subcmd } => cmd_map(fixi, path, subcmd).await,
Command::Bytes { path, subcmd } => cmd_bytes(fixi, path, subcmd).await,
}
}
async fn cmd_init(builder: Builder) -> Result<(), Error> {
let (_, config) = builder.init_config().await?;
println!(
"created Fixity repository at: {}",
config.fixi_dir_path.to_string_lossy()
);
Ok(())
}
#[derive(Debug, StructOpt)]
enum MapSubcmd {
Get {
/// The `Key` to get a `Value` from.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
},
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
/// The `Key` to write a `Value` or Bytes to.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
/// Write the [`Value`] to the given [`Key`].
#[structopt(
name = "VALUE", parse(try_from_str = Value::from_cli_str),
)]
|
Ls {
#[structopt(short = "s", long = "start", parse(try_from_str = Key::from_cli_str))]
start: Option<Key>,
#[structopt(short = "e", long = "end", parse(try_from_str = Key::from_cli_str))]
end: Option<Key>,
},
}
async fn cmd_map(fixi: Fixity, path: Path, subcmd: MapSubcmd) -> Result<(), Error> {
match subcmd {
MapSubcmd::Get { key } => cmd_map_get(fixi, path, key).await,
MapSubcmd::Put { commit, key, value } => cmd_map_put(fixi, path, commit, key, value).await,
MapSubcmd::Ls { start, end } => cmd_map_ls(fixi, path, start, end).await,
}
}
async fn cmd_map_get(fixi: Fixity, path: Path, key: Key) -> Result<(), Error> {
let map = fixi.map(path);
let v = map.get(key).await?;
println!("{:?}", v);
Ok(())
}
async fn cmd_map_put(
fixi: Fixity,
path: Path,
commit: bool,
key: Key,
value: Value,
) -> Result<(), Error> {
let mut map = fixi.map(path);
map.insert(key, value).await?;
if commit {
let addr = map.commit().await?;
println!("{:?}", addr);
}
Ok(())
}
async fn cmd_map_ls(
fixi: Fixity,
path: Path,
start: Option<Key>,
end: Option<Key>,
) -> Result<(), Error> {
let map = fixi.map(path);
let mut iter = match (start, end) {
(Some(start), Some(end)) => map.iter(start..end).await?,
(Some(start), None) => map.iter(start..).await?,
(None, Some(end)) => map.iter(..end).await?,
(None, None) => map.iter(..).await?,
};
while let Some(res) = iter.next() {
let (key, value) = res?;
println!("{}={}", key, value);
}
Ok(())
}
#[derive(Debug, StructOpt)]
enum BytesSubcmd {
Get,
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
},
}
async fn cmd_bytes(fixi: Fixity, path: Path, subcmd: BytesSubcmd) -> Result<(), Error> {
match subcmd {
BytesSubcmd::Get => cmd_bytes_get(fixi, path).await,
BytesSubcmd::Put { commit } => cmd_bytes_put(fixi, path, commit).await,
}
}
async fn cmd_bytes_get(fixi: Fixity, path: Path) -> Result<(), Error> {
let stdout = tokio::io::stdout();
let bytes = fixi.bytes(path)?;
let _ = bytes.read(stdout).await?;
Ok(())
}
async fn cmd_bytes_put(fixi: Fixity, path: Path, commit: bool) -> Result<(), Error> {
if path.len() == 0 {
return Err(Error::User(
"cannot get/put bytes to root of fixity repository".to_owned(),
));
}
// TODO: tokio docs recommend against this for interactive uses[1], so this
// should be fixed eventually - when interactivity is prioritized a bit more.
//
// Excerpt for context:
//
// > This handle is best used for non-interactive uses, such as when a file is piped
// > into the application. For technical reasons, stdin is implemented by using an ordinary
// > blocking read on a separate thread, and it is impossible to cancel that read.
// > This can make shutdown of the runtime hang until the user presses enter.
// >
// > For interactive uses, it is recommended to spawn a thread dedicated to user input and
// > use blocking IO directly in that thread.
//
// [1]: https://docs.rs/tokio/1.2.0/tokio/io/struct.Stdin.html
let stdin = tokio::io::stdin();
let bytes = fixi.bytes(path)?;
let _ = bytes.write(stdin).await?;
if commit {
let addr = bytes.commit().await?;
println!("{:?}", addr);
} else {
println!("bytes staged");
}
Ok(())
}
|
value: Value,
},
|
random_line_split
|
fixi.rs
|
use {
fixity::{
fixity::Builder,
path::Path,
value::{Key, Value},
Config, Fixity,
},
std::path::PathBuf,
structopt::StructOpt,
};
#[derive(Debug, StructOpt)]
#[structopt(name = "fixi", about = "fixity content management")]
struct Opt {
#[structopt(flatten)]
fixi_opt: FixiOpt,
#[structopt(subcommand)]
subcmd: Command,
}
/// A temporary config setting up Fixi with the limited in-dev options
/// it has at the moment.
///
/// In the near future this will be revamped to support complex configuration,
/// which may or may not be managed by StructOpt.
#[derive(Debug, StructOpt)]
struct FixiOpt {
// #[structopt(long, env = "GLOBAL_FIXI_DIR")]
// pub global_fixi_dir: Option<PathBuf>,
#[structopt(long, env = "FIXI_DIR_NAME")]
pub fixi_dir_name: Option<PathBuf>,
#[structopt(long, env = "FIXI_BASE_PATH")]
pub base_path: Option<PathBuf>,
#[structopt(long, env = "FIXI_WORKSPACE", default_value = "default")]
pub workspace: String,
}
#[derive(Debug, StructOpt)]
enum Command {
Init,
/// A Map interface to Fixity data.
///
    /// Map is primarily a low-level interface, enabling insight and mutation on the raw
/// Key-Value format of Fixity.
Map {
/// The destination to write a `Value` to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: MapSubcmd,
},
/// A raw bytes interface to Fixity, allowing reading and writing of arbitrary bytes at the
    /// provided `Path`, deduplicated via content-defined chunking.
Bytes {
/// The destination to write bytes to.
#[structopt(short = "p", long = "path", parse(try_from_str = Path::from_cli_str), default_value = "")]
path: Path,
#[structopt(subcommand)]
subcmd: BytesSubcmd,
},
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("error: {0}")]
User(String),
#[error("fixity error: {0}")]
Fixity(#[from] fixity::Error),
#[error("fixity storage error: {0}")]
StorageError(#[from] fixity::core::storage::Error),
}
#[tokio::main]
async fn main() -> Result<(), Error> {
env_logger::from_env(env_logger::Env::default().default_filter_or("error")).init();
let opt = Opt::from_args();
let fixi = {
let FixiOpt {
fixi_dir_name,
base_path,
workspace,
} = opt.fixi_opt;
let config = Config::builder()
.with_fixi_dir_name(fixi_dir_name)
.with_base_path(base_path)
.with_workspace_name(Some(workspace));
let builder = Fixity::builder().with_config(config.clone());
match opt.subcmd {
Command::Init => {
return cmd_init(builder).await;
},
_ => builder.open().await?,
}
};
match opt.subcmd {
Command::Init => unreachable!("matched above"),
Command::Map { path, subcmd } => cmd_map(fixi, path, subcmd).await,
Command::Bytes { path, subcmd } => cmd_bytes(fixi, path, subcmd).await,
}
}
async fn cmd_init(builder: Builder) -> Result<(), Error> {
let (_, config) = builder.init_config().await?;
println!(
"created Fixity repository at: {}",
config.fixi_dir_path.to_string_lossy()
);
Ok(())
}
#[derive(Debug, StructOpt)]
enum MapSubcmd {
Get {
/// The `Key` to get a `Value` from.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
},
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
/// The `Key` to write a `Value` or Bytes to.
#[structopt(name = "KEY", parse(try_from_str = Key::from_cli_str))]
key: Key,
/// Write the [`Value`] to the given [`Key`].
#[structopt(
name = "VALUE", parse(try_from_str = Value::from_cli_str),
)]
value: Value,
},
Ls {
#[structopt(short = "s", long = "start", parse(try_from_str = Key::from_cli_str))]
start: Option<Key>,
#[structopt(short = "e", long = "end", parse(try_from_str = Key::from_cli_str))]
end: Option<Key>,
},
}
async fn cmd_map(fixi: Fixity, path: Path, subcmd: MapSubcmd) -> Result<(), Error> {
match subcmd {
MapSubcmd::Get { key } => cmd_map_get(fixi, path, key).await,
MapSubcmd::Put { commit, key, value } => cmd_map_put(fixi, path, commit, key, value).await,
MapSubcmd::Ls { start, end } => cmd_map_ls(fixi, path, start, end).await,
}
}
async fn cmd_map_get(fixi: Fixity, path: Path, key: Key) -> Result<(), Error> {
let map = fixi.map(path);
let v = map.get(key).await?;
println!("{:?}", v);
Ok(())
}
async fn cmd_map_put(
fixi: Fixity,
path: Path,
commit: bool,
key: Key,
value: Value,
) -> Result<(), Error> {
let mut map = fixi.map(path);
map.insert(key, value).await?;
if commit {
let addr = map.commit().await?;
println!("{:?}", addr);
}
Ok(())
}
async fn cmd_map_ls(
fixi: Fixity,
path: Path,
start: Option<Key>,
end: Option<Key>,
) -> Result<(), Error> {
let map = fixi.map(path);
let mut iter = match (start, end) {
(Some(start), Some(end)) => map.iter(start..end).await?,
(Some(start), None) => map.iter(start..).await?,
(None, Some(end)) => map.iter(..end).await?,
(None, None) => map.iter(..).await?,
};
while let Some(res) = iter.next() {
let (key, value) = res?;
println!("{}={}", key, value);
}
Ok(())
}
#[derive(Debug, StructOpt)]
enum BytesSubcmd {
Get,
Put {
/// Optionally immediately commit this and any staged changes.
#[structopt(short = "c", long = "commit")]
commit: bool,
},
}
async fn cmd_bytes(fixi: Fixity, path: Path, subcmd: BytesSubcmd) -> Result<(), Error> {
match subcmd {
BytesSubcmd::Get => cmd_bytes_get(fixi, path).await,
BytesSubcmd::Put { commit } => cmd_bytes_put(fixi, path, commit).await,
}
}
async fn cmd_bytes_get(fixi: Fixity, path: Path) -> Result<(), Error> {
let stdout = tokio::io::stdout();
let bytes = fixi.bytes(path)?;
let _ = bytes.read(stdout).await?;
Ok(())
}
async fn cmd_bytes_put(fixi: Fixity, path: Path, commit: bool) -> Result<(), Error>
|
let stdin = tokio::io::stdin();
let bytes = fixi.bytes(path)?;
let _ = bytes.write(stdin).await?;
if commit {
let addr = bytes.commit().await?;
println!("{:?}", addr);
} else {
println!("bytes staged");
}
Ok(())
}
|
{
if path.len() == 0 {
return Err(Error::User(
"cannot get/put bytes to root of fixity repository".to_owned(),
));
}
// TODO: tokio docs recommend against this for interactive uses[1], so this
// should be fixed eventually - when interactivity is prioritized a bit more.
//
// Excerpt for context:
//
// > This handle is best used for non-interactive uses, such as when a file is piped
// > into the application. For technical reasons, stdin is implemented by using an ordinary
// > blocking read on a separate thread, and it is impossible to cancel that read.
// > This can make shutdown of the runtime hang until the user presses enter.
// >
// > For interactive uses, it is recommended to spawn a thread dedicated to user input and
// > use blocking IO directly in that thread.
//
// [1]: https://docs.rs/tokio/1.2.0/tokio/io/struct.Stdin.html
|
identifier_body
|
subscription.rs
|
//! Helper types for converting types to `Result<T, FieldError<E>>`.
//!
//! Used in `#[graphql_subscription]` macros to convert result type aliases on
//! subscription handlers to a concrete return type.
use futures::Stream;
use crate::{FieldError, GraphQLValue, IntoFieldError, ScalarValue};
/// Trait for wrapping [`Stream`] into [`Ok`] if it's not [`Result`].
///
/// Used in subscription macros when the user can provide a type alias for [`Stream`] or
/// `Result<Stream, _>`, and a function then needs to be called on the [`Stream`].
pub trait IntoFieldResult<T, S> {
/// Type of items yielded by this [`Stream`].
type Item;
/// Turns current [`Stream`] type into a generic [`Result`].
fn into_result(self) -> Result<T, FieldError<S>>;
}
impl<T, E, S> IntoFieldResult<T, S> for Result<T, E>
where
T: IntoFieldResult<T, S>,
E: IntoFieldError<S>,
{
type Item = T::Item;
fn into_result(self) -> Result<T, FieldError<S>> {
self.map_err(E::into_field_error)
}
}
impl<T, S> IntoFieldResult<T, S> for T
where
T: Stream,
{
type Item = T::Item;
fn into_result(self) -> Result<T, FieldError<S>> {
Ok(self)
}
}
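// A minimal sketch of how these two impls divide the work, using hypothetical
// aliases (none of these names exist in this module; they are assumptions made
// for the illustration):
//
//     type Users = std::pin::Pin<Box<dyn Stream<Item = String> + Send>>;
//     type UsersResult = Result<Users, MyError>; // where MyError: IntoFieldError<S>
//
// `into_result` on a bare `Users` stream simply wraps it in `Ok`, while on a
// `UsersResult` it leaves the stream untouched and maps `MyError` into
// `FieldError<S>` via `IntoFieldError`, so macro-generated code can treat both
// alias shapes uniformly.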
/// This struct is used in `ExtractTypeFromStream` implementation for streams
/// of values.
pub struct StreamItem;
/// This struct is used in `ExtractTypeFromStream` implementation for results
/// with streams of values inside.
pub struct StreamResult;
/// This struct is used in `ExtractTypeFromStream` implementation for streams
/// of results of values inside.
pub struct ResultStreamItem;
/// This struct is used in `ExtractTypeFromStream` implementation for results
/// with streams of results of values inside.
pub struct
|
;
/// This trait is used in `juniper::graphql_subscription` macro to get stream's
/// item type that implements `GraphQLValue` from type alias provided
/// by user.
pub trait ExtractTypeFromStream<T, S>
where
S: ScalarValue,
{
/// Stream's return Value that will be returned if
    /// no errors occurred. Used to determine the field type in
/// `#[juniper::graphql_subscription]`
type Item: GraphQLValue<S>;
}
impl<T, I, S> ExtractTypeFromStream<StreamItem, S> for T
where
T: futures::Stream<Item = I>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
impl<Ty, T, E, S> ExtractTypeFromStream<StreamResult, S> for Ty
where
Ty: futures::Stream<Item = Result<T, E>>,
T: GraphQLValue<S>,
S: ScalarValue,
{
type Item = T;
}
impl<T, I, E, S> ExtractTypeFromStream<ResultStreamItem, S> for Result<T, E>
where
T: futures::Stream<Item = I>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
impl<T, E, I, ER, S> ExtractTypeFromStream<ResultStreamResult, S> for Result<T, E>
where
T: futures::Stream<Item = Result<I, ER>>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
|
ResultStreamResult
|
identifier_name
|
subscription.rs
|
//! Helper types for converting types to `Result<T, FieldError<E>>`.
//!
//! Used in `#[graphql_subscription]` macros to convert result type aliases on
//! subscription handlers to a concrete return type.
use futures::Stream;
use crate::{FieldError, GraphQLValue, IntoFieldError, ScalarValue};
/// Trait for wrapping [`Stream`] into [`Ok`] if it's not [`Result`].
///
/// Used in subscription macros when the user can provide a type alias for [`Stream`] or
/// `Result<Stream, _>`, and a function then needs to be called on the [`Stream`].
pub trait IntoFieldResult<T, S> {
/// Type of items yielded by this [`Stream`].
type Item;
/// Turns current [`Stream`] type into a generic [`Result`].
fn into_result(self) -> Result<T, FieldError<S>>;
}
impl<T, E, S> IntoFieldResult<T, S> for Result<T, E>
where
T: IntoFieldResult<T, S>,
E: IntoFieldError<S>,
{
type Item = T::Item;
fn into_result(self) -> Result<T, FieldError<S>> {
self.map_err(E::into_field_error)
}
}
impl<T, S> IntoFieldResult<T, S> for T
where
T: Stream,
{
type Item = T::Item;
fn into_result(self) -> Result<T, FieldError<S>> {
Ok(self)
}
}
|
/// of values.
pub struct StreamItem;
/// This struct is used in `ExtractTypeFromStream` implementation for results
/// with streams of values inside.
pub struct StreamResult;
/// This struct is used in `ExtractTypeFromStream` implementation for streams
/// of results of values inside.
pub struct ResultStreamItem;
/// This struct is used in `ExtractTypeFromStream` implementation for results
/// with streams of results of values inside.
pub struct ResultStreamResult;
/// This trait is used in `juniper::graphql_subscription` macro to get stream's
/// item type that implements `GraphQLValue` from type alias provided
/// by user.
pub trait ExtractTypeFromStream<T, S>
where
S: ScalarValue,
{
/// Stream's return Value that will be returned if
/// no errors occured. Is used to determine field type in
/// `#[juniper::graphql_subscription]`
type Item: GraphQLValue<S>;
}
impl<T, I, S> ExtractTypeFromStream<StreamItem, S> for T
where
T: futures::Stream<Item = I>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
impl<Ty, T, E, S> ExtractTypeFromStream<StreamResult, S> for Ty
where
Ty: futures::Stream<Item = Result<T, E>>,
T: GraphQLValue<S>,
S: ScalarValue,
{
type Item = T;
}
impl<T, I, E, S> ExtractTypeFromStream<ResultStreamItem, S> for Result<T, E>
where
T: futures::Stream<Item = I>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
impl<T, E, I, ER, S> ExtractTypeFromStream<ResultStreamResult, S> for Result<T, E>
where
T: futures::Stream<Item = Result<I, ER>>,
I: GraphQLValue<S>,
S: ScalarValue,
{
type Item = I;
}
|
/// This struct is used in `ExtractTypeFromStream` implementation for streams
|
random_line_split
|
threading.rs
|
use std::sync::mpsc::{Receiver as MpscReceiver, TryRecvError};
use std::thread::Builder as ThreadBuilder;
use super::connection::Connection;
use super::Status;
use ::internal::Timer;
use ::model::GuildId;
pub fn start(guild_id: GuildId, rx: MpscReceiver<Status>)
|
fn runner(rx: &MpscReceiver<Status>) {
let mut sender = None;
let mut receiver = None;
let mut connection = None;
let mut timer = Timer::new(20);
'runner: loop {
loop {
match rx.try_recv() {
Ok(Status::Connect(info)) => {
connection = match Connection::new(info) {
Ok(connection) => {
Some(connection)
},
Err(why) => {
warn!("[Voice] Error connecting: {:?}", why);
None
},
};
},
Ok(Status::Disconnect) => {
connection = None;
},
Ok(Status::SetReceiver(r)) => {
receiver = r;
},
Ok(Status::SetSender(s)) => {
sender = s;
},
Err(TryRecvError::Empty) => {
                // If we received nothing, then we can perform an update.
break;
},
Err(TryRecvError::Disconnected) => {
break 'runner;
},
}
}
// Overall here, check if there's an error.
//
        // If there is a connection, try to send an update. This should not
        // error, but if it does for some spurious reason, set `error` to
        // `true`.
        //
        // Otherwise, wait out the timer, do _not_ error, and wait to receive
        // another event.
let error = match connection.as_mut() {
Some(connection) => {
let cycle = connection.cycle(&mut sender,
&mut receiver,
&mut timer);
match cycle {
Ok(()) => false,
Err(why) => {
error!("(╯°□°)╯︵ ┻━┻ Error updating connection: {:?}",
why);
true
},
}
},
None => {
timer.await();
false
},
};
// If there was an error, then just reset the connection and try to get
// another.
if error {
connection = None;
}
}
}
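// --- Illustrative sketch added by the editor; not part of the original source. ---
// How a caller typically drives the runner above: keep the sending half of the
// channel, push `Status` values to reconfigure the voice task, and drop the
// sender so that `try_recv` returns `Disconnected` and the 'runner loop ends.
// The `connect` argument stands in for a `Status::Connect(...)` value built by
// the caller; its payload type is not shown here.
fn drive_voice_example(guild_id: GuildId, connect: Status) {
    let (tx, rx) = ::std::sync::mpsc::channel();
    start(guild_id, rx);
    tx.send(connect).expect("[Voice] runner hung up");
    // ... later, tear the connection down and stop the thread:
    tx.send(Status::Disconnect).expect("[Voice] runner hung up");
    drop(tx); // runner exits on TryRecvError::Disconnected
}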
|
{
let name = format!("Serenity Voice (G{})", guild_id);
ThreadBuilder::new()
.name(name)
.spawn(move || runner(&rx))
.expect(&format!("[Voice] Error starting guild: {:?}", guild_id));
}
|
identifier_body
|
threading.rs
|
use std::sync::mpsc::{Receiver as MpscReceiver, TryRecvError};
use std::thread::Builder as ThreadBuilder;
use super::connection::Connection;
use super::Status;
use ::internal::Timer;
use ::model::GuildId;
pub fn start(guild_id: GuildId, rx: MpscReceiver<Status>) {
let name = format!("Serenity Voice (G{})", guild_id);
ThreadBuilder::new()
.name(name)
.spawn(move || runner(&rx))
.expect(&format!("[Voice] Error starting guild: {:?}", guild_id));
}
fn runner(rx: &MpscReceiver<Status>) {
|
let mut sender = None;
let mut receiver = None;
let mut connection = None;
let mut timer = Timer::new(20);
'runner: loop {
loop {
match rx.try_recv() {
Ok(Status::Connect(info)) => {
connection = match Connection::new(info) {
Ok(connection) => {
Some(connection)
},
Err(why) => {
warn!("[Voice] Error connecting: {:?}", why);
None
},
};
},
Ok(Status::Disconnect) => {
connection = None;
},
Ok(Status::SetReceiver(r)) => {
receiver = r;
},
Ok(Status::SetSender(s)) => {
sender = s;
},
Err(TryRecvError::Empty) => {
                    // If we received nothing, then we can perform an update.
break;
},
Err(TryRecvError::Disconnected) => {
break 'runner;
},
}
}
// Overall here, check if there's an error.
//
        // If there is a connection, try to send an update. This should not
        // error, but if it does for some spurious reason, set `error` to
        // `true`.
        //
        // Otherwise, wait out the timer, do _not_ error, and wait to receive
        // another event.
let error = match connection.as_mut() {
Some(connection) => {
let cycle = connection.cycle(&mut sender,
&mut receiver,
&mut timer);
match cycle {
Ok(()) => false,
Err(why) => {
error!("(╯°□°)╯︵ ┻━┻ Error updating connection: {:?}",
why);
true
},
}
},
None => {
timer.await();
false
},
};
// If there was an error, then just reset the connection and try to get
// another.
if error {
connection = None;
}
}
}
|
random_line_split
|
|
threading.rs
|
use std::sync::mpsc::{Receiver as MpscReceiver, TryRecvError};
use std::thread::Builder as ThreadBuilder;
use super::connection::Connection;
use super::Status;
use ::internal::Timer;
use ::model::GuildId;
pub fn
|
(guild_id: GuildId, rx: MpscReceiver<Status>) {
let name = format!("Serenity Voice (G{})", guild_id);
ThreadBuilder::new()
.name(name)
.spawn(move || runner(&rx))
.expect(&format!("[Voice] Error starting guild: {:?}", guild_id));
}
fn runner(rx: &MpscReceiver<Status>) {
let mut sender = None;
let mut receiver = None;
let mut connection = None;
let mut timer = Timer::new(20);
'runner: loop {
loop {
match rx.try_recv() {
Ok(Status::Connect(info)) => {
connection = match Connection::new(info) {
Ok(connection) => {
Some(connection)
},
Err(why) => {
warn!("[Voice] Error connecting: {:?}", why);
None
},
};
},
Ok(Status::Disconnect) => {
connection = None;
},
Ok(Status::SetReceiver(r)) => {
receiver = r;
},
Ok(Status::SetSender(s)) => {
sender = s;
},
Err(TryRecvError::Empty) => {
                    // If we received nothing, then we can perform an update.
break;
},
Err(TryRecvError::Disconnected) => {
break 'runner;
},
}
}
// Overall here, check if there's an error.
//
        // If there is a connection, try to send an update. This should not
        // error, but if it does for some spurious reason, set `error` to
        // `true`.
        //
        // Otherwise, wait out the timer, do _not_ error, and wait to receive
        // another event.
let error = match connection.as_mut() {
Some(connection) => {
let cycle = connection.cycle(&mut sender,
&mut receiver,
&mut timer);
match cycle {
Ok(()) => false,
Err(why) => {
error!("(╯°□°)╯︵ ┻━┻ Error updating connection: {:?}",
why);
true
},
}
},
None => {
timer.await();
false
},
};
// If there was an error, then just reset the connection and try to get
// another.
if error {
connection = None;
}
}
}
|
start
|
identifier_name
|
config.rs
|
// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate toml;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
#[derive(Debug, RustcDecodable)]
pub struct NetConfig {
pub id_card: Option<u32>,
pub port: Option<u64>,
pub max_peer: Option<u64>,
pub peers: Option<Vec<PeerConfig>>,
}
#[derive(Debug, RustcDecodable)]
pub struct PeerConfig {
pub id_card: Option<u32>,
pub ip: Option<String>,
pub port: Option<u64>,
}
impl NetConfig {
pub fn new(path: &str) -> Self {
let config_file = File::open(path).unwrap();
let mut fconfig = BufReader::new(config_file);
let mut content = String::new();
fconfig.read_to_string(&mut content).unwrap();
toml::decode_str(&content).unwrap()
}
pub fn
|
() -> Self {
let toml = r#"
id_card=0
port = 40000
max_peer = 1
[[peers]]
id_card=0
ip = "127.0.0.1"
port = 40000
"#;
toml::decode_str(toml).unwrap()
}
}
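// --- Illustrative sketch added by the editor; not part of the original source. ---
// Typical use of the struct above: load a config and walk its peers. The
// "config.toml" path is a made-up example, and since every field is an Option,
// defaults are applied explicitly at the call site.
fn print_peers_example() {
    let config = NetConfig::new("config.toml");
    println!("listening on port {}", config.port.unwrap_or(40000));
    for peer in config.peers.unwrap_or_else(Vec::new) {
        println!("peer {:?} at {:?}:{:?}", peer.id_card, peer.ip, peer.port);
    }
}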
#[cfg(test)]
mod test {
use super::NetConfig;
extern crate toml;
#[test]
fn basics() {
let toml = r#"
port = 40000
max_peer = 2
[[peers]]
ip = "127.0.0.1"
port = 40001
[[peers]]
ip = "127.0.0.1"
port = 40002
"#;
let value: NetConfig = toml::decode_str(toml).unwrap();
println!("{:?}", value);
assert_eq!(value.port, Some(40000));
}
}
|
test_config
|
identifier_name
|
config.rs
|
// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate toml;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
#[derive(Debug, RustcDecodable)]
pub struct NetConfig {
pub id_card: Option<u32>,
pub port: Option<u64>,
pub max_peer: Option<u64>,
pub peers: Option<Vec<PeerConfig>>,
}
#[derive(Debug, RustcDecodable)]
pub struct PeerConfig {
pub id_card: Option<u32>,
pub ip: Option<String>,
pub port: Option<u64>,
}
impl NetConfig {
pub fn new(path: &str) -> Self {
let config_file = File::open(path).unwrap();
let mut fconfig = BufReader::new(config_file);
let mut content = String::new();
fconfig.read_to_string(&mut content).unwrap();
toml::decode_str(&content).unwrap()
}
pub fn test_config() -> Self
|
}
#[cfg(test)]
mod test {
use super::NetConfig;
extern crate toml;
#[test]
fn basics() {
let toml = r#"
port = 40000
max_peer = 2
[[peers]]
ip = "127.0.0.1"
port = 40001
[[peers]]
ip = "127.0.0.1"
port = 40002
"#;
let value: NetConfig = toml::decode_str(toml).unwrap();
println!("{:?}", value);
assert_eq!(value.port, Some(40000));
}
}
|
{
let toml = r#"
id_card=0
port = 40000
max_peer = 1
[[peers]]
id_card=0
ip = "127.0.0.1"
port = 40000
"#;
toml::decode_str(toml).unwrap()
}
|
identifier_body
|
config.rs
|
// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate toml;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
#[derive(Debug, RustcDecodable)]
pub struct NetConfig {
pub id_card: Option<u32>,
pub port: Option<u64>,
pub max_peer: Option<u64>,
pub peers: Option<Vec<PeerConfig>>,
}
#[derive(Debug, RustcDecodable)]
pub struct PeerConfig {
pub id_card: Option<u32>,
pub ip: Option<String>,
pub port: Option<u64>,
}
impl NetConfig {
pub fn new(path: &str) -> Self {
let config_file = File::open(path).unwrap();
let mut fconfig = BufReader::new(config_file);
let mut content = String::new();
fconfig.read_to_string(&mut content).unwrap();
toml::decode_str(&content).unwrap()
}
pub fn test_config() -> Self {
|
id_card=0
port = 40000
max_peer = 1
[[peers]]
id_card=0
ip = "127.0.0.1"
port = 40000
"#;
toml::decode_str(toml).unwrap()
}
}
#[cfg(test)]
mod test {
use super::NetConfig;
extern crate toml;
#[test]
fn basics() {
let toml = r#"
port = 40000
max_peer = 2
[[peers]]
ip = "127.0.0.1"
port = 40001
[[peers]]
ip = "127.0.0.1"
port = 40002
"#;
let value: NetConfig = toml::decode_str(toml).unwrap();
println!("{:?}", value);
assert_eq!(value.port, Some(40000));
}
}
|
let toml = r#"
|
random_line_split
|
cci_nested_lib.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[legacy_modes];
pub struct Entry<A,B> {key: A, value: B}
pub struct alist<A,B> { eq_fn: @fn(A,A) -> bool, data: @mut ~[Entry<A,B>] }
pub fn alist_add<A:Copy,B:Copy>(lst: alist<A,B>, k: A, v: B) {
lst.data.push(Entry{key:k, value:v});
}
pub fn alist_get<A:Copy,B:Copy>(lst: alist<A,B>, k: A) -> B {
let eq_fn = lst.eq_fn;
for lst.data.each |entry| {
if eq_fn(entry.key, k)
|
}
fail!();
}
#[inline]
pub fn new_int_alist<B:Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
#[inline]
pub fn new_int_alist_2<B:Copy>() -> alist<int, B> {
#[inline]
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
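// --- Illustrative sketch added by the editor; not part of the original source. ---
// The code above predates Rust 1.0 (`@`-pointers, `fail!`, `&&` argument modes).
// For readers unfamiliar with that syntax, the same association-list idea in
// present-day Rust looks roughly like this; the names are stand-ins and this is
// not part of the original crate.
struct AList<A, B> {
    eq_fn: fn(&A, &A) -> bool,
    data: Vec<(A, B)>,
}

impl<A, B: Clone> AList<A, B> {
    fn add(&mut self, k: A, v: B) {
        self.data.push((k, v));
    }

    fn get(&self, k: &A) -> B {
        for (key, value) in &self.data {
            if (self.eq_fn)(key, k) {
                return value.clone();
            }
        }
        panic!("key not found");
    }
}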
|
{ return entry.value; }
|
conditional_block
|
cci_nested_lib.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[legacy_modes];
pub struct Entry<A,B> {key: A, value: B}
pub struct alist<A,B> { eq_fn: @fn(A,A) -> bool, data: @mut ~[Entry<A,B>] }
pub fn alist_add<A:Copy,B:Copy>(lst: alist<A,B>, k: A, v: B) {
lst.data.push(Entry{key:k, value:v});
}
pub fn alist_get<A:Copy,B:Copy>(lst: alist<A,B>, k: A) -> B
|
#[inline]
pub fn new_int_alist<B:Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
#[inline]
pub fn new_int_alist_2<B:Copy>() -> alist<int, B> {
#[inline]
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
|
{
let eq_fn = lst.eq_fn;
for lst.data.each |entry| {
if eq_fn(entry.key, k) { return entry.value; }
}
fail!();
}
|
identifier_body
|
cci_nested_lib.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[legacy_modes];
pub struct Entry<A,B> {key: A, value: B}
pub struct alist<A,B> { eq_fn: @fn(A,A) -> bool, data: @mut ~[Entry<A,B>] }
pub fn
|
<A:Copy,B:Copy>(lst: alist<A,B>, k: A, v: B) {
lst.data.push(Entry{key:k, value:v});
}
pub fn alist_get<A:Copy,B:Copy>(lst: alist<A,B>, k: A) -> B {
let eq_fn = lst.eq_fn;
for lst.data.each |entry| {
if eq_fn(entry.key, k) { return entry.value; }
}
fail!();
}
#[inline]
pub fn new_int_alist<B:Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
#[inline]
pub fn new_int_alist_2<B:Copy>() -> alist<int, B> {
#[inline]
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
|
alist_add
|
identifier_name
|
cci_nested_lib.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
#[legacy_modes];
pub struct Entry<A,B> {key: A, value: B}
pub struct alist<A,B> { eq_fn: @fn(A,A) -> bool, data: @mut ~[Entry<A,B>] }
pub fn alist_add<A:Copy,B:Copy>(lst: alist<A,B>, k: A, v: B) {
lst.data.push(Entry{key:k, value:v});
}
pub fn alist_get<A:Copy,B:Copy>(lst: alist<A,B>, k: A) -> B {
let eq_fn = lst.eq_fn;
for lst.data.each |entry| {
if eq_fn(entry.key, k) { return entry.value; }
}
fail!();
}
#[inline]
pub fn new_int_alist<B:Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
#[inline]
pub fn new_int_alist_2<B:Copy>() -> alist<int, B> {
#[inline]
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: @mut ~[]};
}
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
random_line_split
|
issue-2311-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait clam<A:Copy> { }
struct foo<A> {
x: A,
}
impl<A:Copy> foo<A> {
pub fn bar<B,C:clam<A>>(&self, c: C) -> B
|
}
fn foo<A:Copy>(b: A) -> foo<A> {
foo {
x: b
}
}
pub fn main() { }
|
{
fail!();
}
|
identifier_body
|
issue-2311-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait clam<A:Copy> { }
struct foo<A> {
x: A,
}
impl<A:Copy> foo<A> {
|
}
fn foo<A:Copy>(b: A) -> foo<A> {
foo {
x: b
}
}
pub fn main() { }
|
pub fn bar<B,C:clam<A>>(&self, c: C) -> B {
fail!();
}
|
random_line_split
|
issue-2311-2.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait clam<A:Copy> { }
struct foo<A> {
x: A,
}
impl<A:Copy> foo<A> {
pub fn bar<B,C:clam<A>>(&self, c: C) -> B {
fail!();
}
}
fn foo<A:Copy>(b: A) -> foo<A> {
foo {
x: b
}
}
pub fn
|
() { }
|
main
|
identifier_name
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn expect_exit(self) -> T {
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
}
/// Print register names
fn reg_names(cs: &Capstone, regs: &[RegId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.reg_name(x).unwrap()).collect();
names.join(", ")
}
/// Print instruction group names
fn group_names(cs: &Capstone, regs: &[InsnGroupId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.group_name(x).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0'..='9').contains(&b) || ('a'..='f').contains(&b) || ('A'..='F').contains(&b) {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
            let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
)
|
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn main() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
.arg(CODE_ARG)
.required(true),
)
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().copied().collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
.unwrap();
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap()).unwrap();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap()).unwrap();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x.map(|x| ExtraMode::from_str(x).unwrap()).collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().copied(),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
{
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
|
identifier_body
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn expect_exit(self) -> T {
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
}
/// Print register names
fn reg_names(cs: &Capstone, regs: &[RegId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.reg_name(x).unwrap()).collect();
names.join(", ")
}
/// Print instruction group names
fn group_names(cs: &Capstone, regs: &[InsnGroupId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.group_name(x).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0'..='9').contains(&b) || ('a'..='f').contains(&b) || ('A'..='F').contains(&b) {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
            let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
) {
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
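// --- Illustrative sketch added by the editor; not part of the original source. ---
// Calling `disasm` directly with a couple of literal x86 bytes (nop; inc edx),
// no extra modes, and the default endianness. The `Arch::X86` and `Mode::Mode32`
// variant names are assumed from the capstone re-exports used above, so treat
// this as a sketch rather than a verified invocation.
fn disasm_example() {
    let code = [0x90u8, 0x42];
    disasm(
        Arch::X86,
        Mode::Mode32,
        std::iter::empty::<ExtraMode>(),
        None,
        &code,
        0x1000,
        false,
    );
}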
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn
|
() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
.arg(CODE_ARG)
.required(true),
)
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().copied().collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
.unwrap();
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap()).unwrap();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap()).unwrap();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x.map(|x| ExtraMode::from_str(x).unwrap()).collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().copied(),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
main
|
identifier_name
|
cstool.rs
|
//! Disassembles machine code
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::process::exit;
use std::str::FromStr;
use capstone::{self, prelude::*, Arch, Endian, EnumList, ExtraMode, Mode};
use clap::{App, Arg, ArgGroup};
use log::{debug, info};
const DEFAULT_CAPACITY: usize = 1024;
trait ExpectExit<T> {
fn expect_exit(self) -> T;
}
impl<T, E> ExpectExit<T> for Result<T, E>
where
E: Display,
{
fn expect_exit(self) -> T {
match self {
Ok(t) => t,
Err(e) => {
eprintln!("error: {}", e);
exit(1);
}
}
}
}
/// Print register names
fn reg_names(cs: &Capstone, regs: &[RegId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.reg_name(x).unwrap()).collect();
names.join(", ")
}
/// Print instruction group names
fn group_names(cs: &Capstone, regs: &[InsnGroupId]) -> String {
let names: Vec<String> = regs.iter().map(|&x| cs.group_name(x).unwrap()).collect();
names.join(", ")
}
/// Select only hex bytes from input
fn unhexed_bytes(input: Vec<u8>) -> Vec<u8> {
let mut output: Vec<u8> = Vec::new();
let mut curr_byte_str = String::with_capacity(2);
for b_u8 in input {
let b = char::from(b_u8);
if ('0'..='9').contains(&b) || ('a'..='f').contains(&b) || ('A'..='F').contains(&b) {
curr_byte_str.push(b);
}
if curr_byte_str.len() == 2 {
debug!(" curr_byte_str={:?}", curr_byte_str);
            let byte = u8::from_str_radix(&curr_byte_str, 16).expect("Unexpected hex parse error");
output.push(byte);
curr_byte_str.clear();
}
}
if log::max_level() >= log::LevelFilter::Info {
let output_hex: Vec<String> = output.iter().map(|x| format!("{:02x}", x)).collect();
info!("unhexed_output = {:?}", output_hex);
}
output
}
fn disasm<T: Iterator<Item = ExtraMode>>(
arch: Arch,
mode: Mode,
extra_mode: T,
endian: Option<Endian>,
code: &[u8],
addr: u64,
show_detail: bool,
) {
info!("Got {} bytes", code.len());
let mut cs = Capstone::new_raw(arch, mode, extra_mode, endian).expect_exit();
if show_detail {
cs.set_detail(true).expect("Failed to set detail");
}
let stdout = io::stdout();
let mut handle = stdout.lock();
for i in cs.disasm_all(code, addr).expect_exit().iter() {
let bytes: Vec<_> = i.bytes().iter().map(|x| format!("{:02x}", x)).collect();
let bytes = bytes.join(" ");
let _ = writeln!(
&mut handle,
"{:-10x}: {:35} {:7} {}",
i.address(),
bytes,
i.mnemonic().unwrap(),
i.op_str().unwrap_or("")
)
.is_ok();
if show_detail {
let detail = cs.insn_detail(i).expect("Failed to get insn detail");
let output: &[(&str, String)] = &[
("insn id:", format!("{:?}", i.id().0)),
("read regs:", reg_names(&cs, detail.regs_read())),
("write regs:", reg_names(&cs, detail.regs_write())),
("insn groups:", group_names(&cs, detail.groups())),
];
for &(ref name, ref message) in output.iter() {
let _ = writeln!(&mut handle, "{:13}{:12} {}", "", name, message).is_ok();
}
}
}
}
const FILE_ARG: &str = "file";
const STDIN_ARG: &str = "stdin";
const CODE_ARG: &str = "code";
const ADDRESS_ARG: &str = "address";
const VERBOSE_ARG: &str = "verbose";
const HEX_ARG: &str = "hex";
const DETAIL_ARG: &str = "detail";
const ARCH_ARG: &str = "arch";
const MODE_ARG: &str = "mode";
const EXTRA_MODE_ARG: &str = "extra";
const ENDIAN_ARG: &str = "endian";
const AFTER_HELP: &str = r#"
Example:
# Disassemble 32-bit X86 (non-hex characters are ignored)
cstool --arch x86 --mode mode32 --code "90 42 e812345678"
1000: 90 nop
1001: 42 inc edx
1002: e8 12 34 56 78 call 0x78564419
"#;
fn main() {
// Lowercase arches
let _arches: Vec<String> = Arch::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let arches: Vec<&str> = _arches.iter().map(|x| x.as_str()).collect();
// Lowercase modes
let _modes: Vec<String> = Mode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let modes: Vec<&str> = _modes.iter().map(|x| x.as_str()).collect();
// Lowercase extra modes
let _extra_modes: Vec<String> = ExtraMode::variants()
.iter()
.map(|x| format!("{}", x).to_lowercase())
.collect();
let extra_modes: Vec<&str> = _extra_modes.iter().map(|x| x.as_str()).collect();
let matches = App::new("capstone-rs disassembler tool")
.about("Disassembles binary file")
.after_help(AFTER_HELP)
.arg(
Arg::with_name(FILE_ARG)
.short("f")
.long(FILE_ARG)
.help("input file with binary instructions")
.takes_value(true),
)
.arg(
Arg::with_name(STDIN_ARG)
.short("s")
.long(STDIN_ARG)
.help("read binary instructions from stdin")
.takes_value(false),
)
.arg(
Arg::with_name(CODE_ARG)
.short("c")
.long(CODE_ARG)
.help("instruction bytes (implies --hex)")
.takes_value(true),
)
.arg(
Arg::with_name(ADDRESS_ARG)
.short("r")
.long("addr")
.help("address of code")
.takes_value(true),
)
.arg(
Arg::with_name(VERBOSE_ARG)
.short("v")
.multiple(true)
.help("Sets the level of verbosity"),
)
.arg(
Arg::with_name(HEX_ARG)
.short("x")
.long(HEX_ARG)
.help("Treat input has hex; only select characters that are [a-fA-F0-9]")
.takes_value(false),
)
.arg(
Arg::with_name(DETAIL_ARG)
.short("d")
.long(DETAIL_ARG)
.help("Print details about instructions")
.takes_value(false),
)
.arg(
Arg::with_name(ARCH_ARG)
.short("a")
.long(ARCH_ARG)
.help("Architecture")
.takes_value(true)
.required(true)
.possible_values(arches.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(MODE_ARG)
.short("m")
.long(MODE_ARG)
.help(MODE_ARG)
.takes_value(true)
.required(true)
.possible_values(modes.as_slice())
.case_insensitive(true),
)
.arg(
Arg::with_name(EXTRA_MODE_ARG)
.short("e")
.long(EXTRA_MODE_ARG)
.help("Extra Mode")
.takes_value(true)
.required(false)
.possible_values(extra_modes.as_slice())
.case_insensitive(true)
.multiple(true),
)
.arg(
Arg::with_name(ENDIAN_ARG)
.short("n")
.long(ENDIAN_ARG)
.help("Endianness")
.takes_value(true)
.required(false)
.possible_values(&["little", "big"])
.case_insensitive(true),
)
.group(
ArgGroup::with_name("input")
.arg(FILE_ARG)
.arg(STDIN_ARG)
|
.get_matches();
let direct_input_bytes: Vec<u8> = if let Some(file_path) = matches.value_of(FILE_ARG) {
let mut file = File::open(file_path).expect_exit();
let capacity = match file.metadata() {
Err(_) => DEFAULT_CAPACITY,
Ok(metadata) => metadata.len() as usize,
};
let mut buf = Vec::with_capacity(capacity as usize);
file.read_to_end(&mut buf).expect_exit();
buf
} else if let Some(code) = matches.value_of(CODE_ARG) {
code.as_bytes().iter().copied().collect()
} else {
let mut buf = Vec::with_capacity(DEFAULT_CAPACITY);
let stdin = std::io::stdin();
stdin.lock().read_to_end(&mut buf).expect_exit();
buf
};
stderrlog::new()
.verbosity(matches.occurrences_of(VERBOSE_ARG) as usize)
.init()
.unwrap();
let is_hex = matches.is_present(HEX_ARG) || matches.is_present(CODE_ARG);
info!("is_hex = {:?}", is_hex);
let show_detail = matches.is_present(DETAIL_ARG);
info!("show_detail = {:?}", show_detail);
let arch: Arch = Arch::from_str(matches.value_of(ARCH_ARG).unwrap()).unwrap();
info!("Arch = {:?}", arch);
let mode: Mode = Mode::from_str(matches.value_of(MODE_ARG).unwrap()).unwrap();
info!("Mode = {:?}", mode);
let extra_mode: Vec<_> = match matches.values_of(EXTRA_MODE_ARG) {
None => Vec::with_capacity(0),
Some(x) => x.map(|x| ExtraMode::from_str(x).unwrap()).collect(),
};
info!("ExtraMode = {:?}", extra_mode);
let endian: Option<Endian> = matches
.value_of(ENDIAN_ARG)
.map(|x| Endian::from_str(x).expect_exit());
info!("Endian = {:?}", endian);
let address =
u64::from_str_radix(matches.value_of(ADDRESS_ARG).unwrap_or("1000"), 16).expect_exit();
info!("Address = 0x{:x}", address);
let input_bytes = if is_hex {
unhexed_bytes(direct_input_bytes)
} else {
direct_input_bytes
};
disasm(
arch,
mode,
extra_mode.iter().copied(),
endian,
input_bytes.as_slice(),
address,
show_detail,
);
}
|
.arg(CODE_ARG)
.required(true),
)
|
random_line_split
|
main.rs
|
extern crate rusoto;
use std::{thread, time};
use rusoto::rds::{RdsClient, CreateDBInstanceMessage, DescribeDBInstancesMessage};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
fn
|
() {
let database_instance_name = "rusototester2";
let credentials = DefaultCredentialsProvider::new().unwrap();
// Security groups in the default VPC will need modification to let you access this from the internet:
let rds_client = RdsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let create_db_instance_request = CreateDBInstanceMessage {
allocated_storage: Some(5),
backup_retention_period: Some(0),
db_instance_identifier: database_instance_name.to_string(),
db_instance_class: "db.t2.micro".to_string(),
// name and login details should match `.env` in rusoto-rocket
master_user_password: Some("TotallySecurePassword501".to_string()),
master_username: Some("masteruser".to_string()),
db_name: Some("rusotodb".to_string()),
engine: "postgres".to_string(),
multi_az: Some(false),
..Default::default()
};
println!("Going to make the database instance.");
let db_creation_result = rds_client.create_db_instance(&create_db_instance_request).unwrap();
println!("Created! \n\n{:?}", db_creation_result);
    // The endpoint isn't available until the DB is created, so let's wait for it:
let describe_instances_request = DescribeDBInstancesMessage {
db_instance_identifier: Some(database_instance_name.to_string()),
..Default::default()
};
let endpoint : rusoto::rds::Endpoint;
let ten_seconds = time::Duration::from_millis(10000);
loop {
match rds_client.describe_db_instances(&describe_instances_request).unwrap().db_instances.unwrap()[0].endpoint {
Some(ref endpoint_result) => {
endpoint = endpoint_result.clone();
break;
},
None => {
println!("Waiting for db to be available...");
thread::sleep(ten_seconds);
continue;
},
};
}
let endpoint_address = endpoint.address.unwrap();
let endpoint_port = endpoint.port.unwrap();
println!("\n\nendpoint: {:?}", format!("{}:{}", endpoint_address, endpoint_port));
}
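// --- Illustrative sketch added by the editor; not part of the original source. ---
// The loop above polls forever; the same pattern generalized into a small helper
// that gives up after a fixed number of attempts. It only uses the `thread` and
// `time` imports already present in this file.
fn wait_for<T, F>(mut probe: F, attempts: u32, delay: time::Duration) -> Option<T>
    where F: FnMut() -> Option<T>
{
    for _ in 0..attempts {
        if let Some(value) = probe() {
            return Some(value);
        }
        println!("Waiting...");
        thread::sleep(delay);
    }
    None
}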
|
main
|
identifier_name
|
main.rs
|
extern crate rusoto;
use std::{thread, time};
use rusoto::rds::{RdsClient, CreateDBInstanceMessage, DescribeDBInstancesMessage};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
fn main()
|
println!("Going to make the database instance.");
let db_creation_result = rds_client.create_db_instance(&create_db_instance_request).unwrap();
println!("Created! \n\n{:?}", db_creation_result);
    // The endpoint isn't available until the DB is created, so let's wait for it:
let describe_instances_request = DescribeDBInstancesMessage {
db_instance_identifier: Some(database_instance_name.to_string()),
..Default::default()
};
let endpoint : rusoto::rds::Endpoint;
let ten_seconds = time::Duration::from_millis(10000);
loop {
match rds_client.describe_db_instances(&describe_instances_request).unwrap().db_instances.unwrap()[0].endpoint {
Some(ref endpoint_result) => {
endpoint = endpoint_result.clone();
break;
},
None => {
println!("Waiting for db to be available...");
thread::sleep(ten_seconds);
continue;
},
};
}
let endpoint_address = endpoint.address.unwrap();
let endpoint_port = endpoint.port.unwrap();
println!("\n\nendpoint: {:?}", format!("{}:{}", endpoint_address, endpoint_port));
}
|
{
let database_instance_name = "rusototester2";
let credentials = DefaultCredentialsProvider::new().unwrap();
// Security groups in the default VPC will need modification to let you access this from the internet:
let rds_client = RdsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let create_db_instance_request = CreateDBInstanceMessage {
allocated_storage: Some(5),
backup_retention_period: Some(0),
db_instance_identifier: database_instance_name.to_string(),
db_instance_class: "db.t2.micro".to_string(),
// name and login details should match `.env` in rusoto-rocket
master_user_password: Some("TotallySecurePassword501".to_string()),
master_username: Some("masteruser".to_string()),
db_name: Some("rusotodb".to_string()),
engine: "postgres".to_string(),
multi_az: Some(false),
..Default::default()
};
|
identifier_body
|
main.rs
|
extern crate rusoto;
use std::{thread, time};
use rusoto::rds::{RdsClient, CreateDBInstanceMessage, DescribeDBInstancesMessage};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
fn main() {
let database_instance_name = "rusototester2";
let credentials = DefaultCredentialsProvider::new().unwrap();
// Security groups in the default VPC will need modification to let you access this from the internet:
let rds_client = RdsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let create_db_instance_request = CreateDBInstanceMessage {
allocated_storage: Some(5),
|
backup_retention_period: Some(0),
db_instance_identifier: database_instance_name.to_string(),
db_instance_class: "db.t2.micro".to_string(),
// name and login details should match `.env` in rusoto-rocket
master_user_password: Some("TotallySecurePassword501".to_string()),
master_username: Some("masteruser".to_string()),
db_name: Some("rusotodb".to_string()),
engine: "postgres".to_string(),
multi_az: Some(false),
..Default::default()
};
println!("Going to make the database instance.");
let db_creation_result = rds_client.create_db_instance(&create_db_instance_request).unwrap();
println!("Created! \n\n{:?}", db_creation_result);
    // The endpoint isn't available until the DB is created, so let's wait for it:
let describe_instances_request = DescribeDBInstancesMessage {
db_instance_identifier: Some(database_instance_name.to_string()),
..Default::default()
};
let endpoint : rusoto::rds::Endpoint;
let ten_seconds = time::Duration::from_millis(10000);
loop {
match rds_client.describe_db_instances(&describe_instances_request).unwrap().db_instances.unwrap()[0].endpoint {
Some(ref endpoint_result) => {
endpoint = endpoint_result.clone();
break;
},
None => {
println!("Waiting for db to be available...");
thread::sleep(ten_seconds);
continue;
},
};
}
let endpoint_address = endpoint.address.unwrap();
let endpoint_port = endpoint.port.unwrap();
println!("\n\nendpoint: {:?}", format!("{}:{}", endpoint_address, endpoint_port));
}
|
random_line_split
|
|
main.rs
|
extern crate rusoto;
use std::{thread, time};
use rusoto::rds::{RdsClient, CreateDBInstanceMessage, DescribeDBInstancesMessage};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
fn main() {
let database_instance_name = "rusototester2";
let credentials = DefaultCredentialsProvider::new().unwrap();
// Security groups in the default VPC will need modification to let you access this from the internet:
let rds_client = RdsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let create_db_instance_request = CreateDBInstanceMessage {
allocated_storage: Some(5),
backup_retention_period: Some(0),
db_instance_identifier: database_instance_name.to_string(),
db_instance_class: "db.t2.micro".to_string(),
// name and login details should match `.env` in rusoto-rocket
master_user_password: Some("TotallySecurePassword501".to_string()),
master_username: Some("masteruser".to_string()),
db_name: Some("rusotodb".to_string()),
engine: "postgres".to_string(),
multi_az: Some(false),
..Default::default()
};
println!("Going to make the database instance.");
let db_creation_result = rds_client.create_db_instance(&create_db_instance_request).unwrap();
println!("Created! \n\n{:?}", db_creation_result);
    // The endpoint isn't available until the DB is created, so let's wait for it:
let describe_instances_request = DescribeDBInstancesMessage {
db_instance_identifier: Some(database_instance_name.to_string()),
..Default::default()
};
let endpoint : rusoto::rds::Endpoint;
let ten_seconds = time::Duration::from_millis(10000);
loop {
match rds_client.describe_db_instances(&describe_instances_request).unwrap().db_instances.unwrap()[0].endpoint {
Some(ref endpoint_result) =>
|
,
None => {
println!("Waiting for db to be available...");
thread::sleep(ten_seconds);
continue;
},
};
}
let endpoint_address = endpoint.address.unwrap();
let endpoint_port = endpoint.port.unwrap();
println!("\n\nendpoint: {:?}", format!("{}:{}", endpoint_address, endpoint_port));
}
|
{
endpoint = endpoint_result.clone();
break;
}
|
conditional_block
|
docker.rs
|
use crate::{command::studio::enter::{ARTIFACT_PATH_ENVVAR,
CERT_PATH_ENVVAR,
SSL_CERT_FILE_ENVVAR},
common::ui::UI,
error::{Error,
Result},
hcore::{crypto::CACHE_KEY_PATH_ENV_VAR,
env as henv,
fs::{CACHE_ARTIFACT_PATH,
CACHE_KEY_PATH,
CACHE_KEY_PATH_POSTFIX,
CACHE_SSL_PATH},
os::process,
package::target,
util::docker},
license,
VERSION};
use std::{env,
ffi::{OsStr,
OsString},
path::{Path,
PathBuf},
process::{Command,
Stdio}};
const DOCKER_IMAGE: &str = "habitat/default-studio";
const DOCKER_WINDOWS_IMAGE: &str = "habitat/win-studio";
const DOCKER_IMAGE_ENVVAR: &str = "HAB_DOCKER_STUDIO_IMAGE";
const DOCKER_OPTS_ENVVAR: &str = "HAB_DOCKER_OPTS";
const DOCKER_SOCKET: &str = "/var/run/docker.sock";
const HAB_STUDIO_SECRET: &str = "HAB_STUDIO_SECRET_";
pub fn start_docker_studio(_ui: &mut UI, args: &[OsString]) -> Result<()> {
let mut args = args.to_vec();
if args.get(0) == Some(&OsString::from("rm")) {
return Err(Error::CannotRemoveDockerStudio);
}
let docker_cmd = docker::command_path()?;
let using_windows_containers = is_serving_windows_containers(&docker_cmd);
let image = image_identifier_for_active_target(using_windows_containers)?;
if is_image_present(&docker_cmd, &image) {
debug!("Found Studio Docker image locally.");
} else {
debug!("Failed to find Studio Docker image locally.");
pull_image(&docker_cmd, &image)?;
}
let mnt_prefix = if using_windows_containers { "c:" } else { "" };
let local_cache_key_path = match henv::var(CACHE_KEY_PATH_ENV_VAR) {
Ok(val) => PathBuf::from(val),
Err(_) => (&*CACHE_KEY_PATH).to_path_buf(),
};
    if !local_cache_key_path.exists() {
return Err(Error::FileNotFound(format!("{}\nRun `hab setup` to \
create an origin or use \
`hab origin key` to \
configure your keys.",
local_cache_key_path.display())));
}
let mut volumes = vec![format!("{}:{}{}",
env::current_dir().unwrap().to_string_lossy(),
mnt_prefix,
"/src"),
format!("{}:{}/{}",
local_cache_key_path.display(),
mnt_prefix,
CACHE_KEY_PATH_POSTFIX),];
if let Ok(cache_artifact_path) = henv::var(ARTIFACT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}",
cache_artifact_path, mnt_prefix, CACHE_ARTIFACT_PATH));
}
if let Ok(cache_ssl_path) = henv::var(CERT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}", cache_ssl_path, mnt_prefix, CACHE_SSL_PATH));
}
    if !using_windows_containers
&& (Path::new(DOCKER_SOCKET).exists() || cfg!(target_os = "windows"))
{
volumes.push(format!("{}:{}", DOCKER_SOCKET, DOCKER_SOCKET));
}
let mut env_vars = vec![String::from("DEBUG"),
String::from("DO_CHECK"),
String::from("HAB_AUTH_TOKEN"),
String::from("HAB_BLDR_URL"),
String::from("HAB_BLDR_CHANNEL"),
String::from("HAB_NOCOLORING"),
String::from("HAB_LICENSE"),
String::from("HAB_ORIGIN"),
String::from("HAB_ORIGIN_KEYS"),
String::from("HAB_STUDIO_BACKLINE_PKG"),
String::from("HAB_STUDIO_NOPROFILE"),
String::from("HAB_STUDIO_NOSTUDIORC"),
String::from("HAB_STUDIO_SUP"),
String::from("http_proxy"),
String::from("https_proxy"),
String::from("RUST_LOG"),
String::from("SSL_CERT_FILE"),];
for (key, _) in env::vars() {
if key.starts_with(HAB_STUDIO_SECRET) {
env_vars.push(key);
}
}
// We need to strip out the -D if it exists to avoid
// it getting passed to the sup on entering the studio
let to_cull = OsString::from("-D");
if let Some(index) = args.iter().position(|x| *x == to_cull) {
args.remove(index);
}
// When a user sets SSL_CERT_FILE, we need to modify the absolute
// path to the file to reflect the location of the file inside the studio
update_ssl_cert_file_envvar(mnt_prefix);
// Windows containers do not use filesystem sharing for
// local mounts
    if !using_windows_containers {
check_mounts(&docker_cmd, volumes.iter(), &image)?;
}
run_container(docker_cmd,
&args,
volumes.iter(),
env_vars.iter(),
image,
using_windows_containers)
}
fn update_ssl_cert_file_envvar(mnt_prefix: &str) {
if let Ok(ssl_cert_file) = env::var(SSL_CERT_FILE_ENVVAR) {
if let Some(cert_file_name) = Path::new(&ssl_cert_file).file_name() {
if let Some(cert_file_name) = cert_file_name.to_str() {
// Don't use Path::join here in order to work around platform
// differences with paths on Windows with linux containers enabled
env::set_var(SSL_CERT_FILE_ENVVAR,
format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, cert_file_name));
} else {
warn!("Unable to format {:?} for use inside studio", ssl_cert_file);
}
} else {
warn!("Invalid SSL_CERT_FILE value: {:?}", ssl_cert_file);
}
}
}
fn is_image_present(docker_cmd: &Path, image: &str) -> bool {
let mut cmd = Command::new(docker_cmd);
cmd.arg("images").arg(image).arg("-q");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
!String::from_utf8_lossy(&result.stdout).as_ref().is_empty()
}
fn is_serving_windows_containers(docker_cmd: &Path) -> bool {
let mut cmd = Command::new(docker_cmd);
cmd.arg("version").arg("--format='{{.Server.Os}}'");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
String::from_utf8_lossy(&result.stdout).contains("windows")
}
fn pull_image(docker_cmd: &Path, image: &str) -> Result<()> {
let mut cmd = Command::new(docker_cmd);
cmd.arg("pull")
.arg(image)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
debug!("Running command: {:?}", cmd);
let result = cmd.spawn()
.expect("Docker command failed to spawn")
.wait_with_output()
.expect("Failed to wait on child process");
if result.status.success() {
debug!("Docker image '{}' is present locally.", image);
} else {
debug!("Pulling Docker image '{}' failed with exit code: {:?}",
image, result.status);
let err_output = String::from_utf8_lossy(&result.stderr);
if err_output.contains("image") && err_output.contains("not found") {
return Err(Error::DockerImageNotFound(image.to_string()));
} else if err_output.contains("Cannot connect to the Docker daemon") {
return Err(Error::DockerDaemonDown);
} else {
return Err(Error::DockerNetworkDown(image.to_string()));
}
}
Ok(())
}
/// Checks whether or not the volume mounts are working.
///
/// We need to ensure that filesystem sharing has been enabled, otherwise the user will be
/// greeted with a horrible error message that's difficult to make sense of. To mitigate this,
/// we check the studio version. This will cause Docker to go through the mounting steps, so we
/// can watch stderr for failure, but has the advantage of not requiring a TTY.
fn check_mounts<I, S>(docker_cmd: &Path, volumes: I, image: &str) -> Result<()>
where I: IntoIterator<Item = S>,
S: AsRef<OsStr>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.push("-V".into());
let version_output = Command::new(docker_cmd).args(&cmd_args)
.output()
.expect("docker failed to start");
let stderr = String::from_utf8(version_output.stderr).unwrap();
    if !stderr.is_empty()
&& (stderr.as_str().contains("Mounts denied")
|| stderr.as_str().contains("drive is not shared"))
{
return Err(Error::DockerFileSharingNotEnabled);
}
Ok(())
}
fn run_container<I, J, S, T>(docker_cmd: PathBuf,
args: &[OsString],
volumes: I,
env_vars: J,
image: String,
using_windows_containers: bool)
-> Result<()>
where I: IntoIterator<Item = S>,
J: IntoIterator<Item = T>,
S: AsRef<OsStr>,
T: AsRef<str>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
    if !using_windows_containers {
cmd_args.push("--privileged".into());
}
if atty::is(atty::Stream::Stderr) || atty::is(atty::Stream::Stdout) {
cmd_args.push("--tty".into());
cmd_args.push("--interactive".into());
}
if let Ok(opts) = henv::var(DOCKER_OPTS_ENVVAR) {
let opts = opts
.split_whitespace()
.map(std::convert::Into::into)
// Ensure we're not passing something like `--tty` again here.
            .filter(|v| !cmd_args.contains(v))
.collect::<Vec<_>>();
        if !opts.is_empty() {
debug!("Adding extra Docker options from {} = {:?}",
DOCKER_OPTS_ENVVAR, opts);
cmd_args.extend_from_slice(opts.as_slice());
}
}
for var in env_vars {
if let Ok(val) = henv::var(var.as_ref()) {
debug!("Setting container env var: {:?}='{}'", var.as_ref(), val);
cmd_args.push("--env".into());
cmd_args.push(format!("{}={}", var.as_ref(), val).into());
} else if var.as_ref() == "HAB_LICENSE" && license::license_exists() {
debug!("Hab license already accepted. Setting container env var: \
HAB_LICENSE=accept-no-persist");
cmd_args.push("--env".into());
cmd_args.push("HAB_LICENSE=accept-no-persist".to_string().into());
}
}
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.extend_from_slice(args);
if using_windows_containers {
cmd_args.push("-n".into());
cmd_args.push("-o".into());
cmd_args.push("c:/".into());
}
unset_proxy_env_vars();
process::become_command(docker_cmd, &cmd_args)?;
Ok(())
}
fn unset_proxy_env_vars() {
for var in &["http_proxy", "https_proxy"] {
if henv::var(var).is_ok() {
debug!("Unsetting process environment variable '{}'", var);
env::remove_var(var);
}
}
}
fn image_identifier_for_active_target(using_windows_containers: bool) -> Result<String> {
let windows_base_tag = if using_windows_containers {
Some(docker::default_base_tag_for_host()?)
} else {
None
};
Ok(image_identifier(windows_base_tag, target::PackageTarget::active_target()))
}
/// Returns the Docker Studio image with tag for the desired version which corresponds to the
/// same version (minus release) as this program.
fn
|
(windows_base_tag: Option<&str>, target: target::PackageTarget) -> String {
let version: Vec<&str> = VERSION.split('/').collect();
let (img, tag) = if let Some(t) = windows_base_tag {
(DOCKER_WINDOWS_IMAGE, format!("{}-{}", t, version[0]))
} else {
(DOCKER_IMAGE, version[0].to_string())
};
let studio_target = studio_target(windows_base_tag.is_some(), target);
henv::var(DOCKER_IMAGE_ENVVAR).unwrap_or_else(|_| format!("{}-{}:{}", img, studio_target, tag))
}
// Determine what studio target to use for a given target. This uses the various target feature
// flags to conditionally compile with the correct target symbols. Other parts of the code that
// change behavior based on the available targets should use a similar technique. The docker
// exporter code is one example. However, the docker exporter currently uses `#[cfg(unix)]` and
// `#[cfg(windows)]`. This should potentially be changed.
fn studio_target(windows: bool, target: target::PackageTarget) -> target::PackageTarget {
if windows {
#[cfg(feature = "supported_targets")]
return target::X86_64_WINDOWS;
}
match target {
#[cfg(feature = "supported_targets")]
target::X86_64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX_KERNEL2 => target::X86_64_LINUX_KERNEL2,
#[cfg(feature = "supported_targets")]
target::X86_64_WINDOWS => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::AARCH64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "aarch64-linux")]
target::AARCH64_LINUX => panic!("{} studios are not supported", target::AARCH64_LINUX),
// This is only needed for the case that we have no target enabled. In that case, we get a
// non-exhaustive patterns error because the match statement is empty.
#[cfg(not(any(feature = "supported_targets", feature = "aarch64-linux")))]
_ => unreachable!(),
}
}
#[cfg(test)]
mod tests {
use super::{image_identifier,
update_ssl_cert_file_envvar,
DOCKER_IMAGE,
DOCKER_WINDOWS_IMAGE};
use crate::VERSION;
use crate::{command::studio::enter::SSL_CERT_FILE_ENVVAR,
hcore::{fs::CACHE_SSL_PATH,
package::target}};
habitat_core::locked_env_var!(SSL_CERT_FILE, lock_ssl_cert_file_env_var);
#[test]
#[cfg(feature = "supported_targets")]
fn retrieve_supported_image_identifier() {
assert_eq!(image_identifier(None, target::X86_64_DARWIN),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX_KERNEL2),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux-kernel2", VERSION));
assert_eq!(image_identifier(None, target::X86_64_WINDOWS),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_WINDOWS),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_LINUX),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
}
#[test]
#[should_panic]
#[cfg(feature = "aarch64-linux")]
fn retrieve_aarch64_image_identifier() { image_identifier(None, target::AARCH64_LINUX); }
#[test]
fn update_ssl_cert_file_envvar_not_set() {
let mnt_prefix = "";
let env_var = lock_ssl_cert_file_env_var();
env_var.unset();
update_ssl_cert_file_envvar(mnt_prefix);
assert!(std::env::var(SSL_CERT_FILE_ENVVAR).is_err());
}
#[test]
fn update_ssl_cert_file_envvar_has_value() {
let mnt_prefix = "/some/prefix";
let key_name = "ssl-test-cert.pem";
let env_var = lock_ssl_cert_file_env_var();
env_var.set(format!("/path/to/{}", key_name));
update_ssl_cert_file_envvar(mnt_prefix);
// Don't use Path::join here because we format! the path above,
// in order to work around platform differences with paths on
// windows with linux containers enabled
let internal_cert_path = format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, key_name);
assert_eq!(std::env::var(SSL_CERT_FILE_ENVVAR), Ok(internal_cert_path));
}
}
|
image_identifier
|
identifier_name
|
docker.rs
|
use crate::{command::studio::enter::{ARTIFACT_PATH_ENVVAR,
CERT_PATH_ENVVAR,
SSL_CERT_FILE_ENVVAR},
common::ui::UI,
error::{Error,
Result},
hcore::{crypto::CACHE_KEY_PATH_ENV_VAR,
env as henv,
fs::{CACHE_ARTIFACT_PATH,
CACHE_KEY_PATH,
CACHE_KEY_PATH_POSTFIX,
CACHE_SSL_PATH},
os::process,
package::target,
util::docker},
license,
VERSION};
use std::{env,
ffi::{OsStr,
OsString},
path::{Path,
PathBuf},
process::{Command,
Stdio}};
const DOCKER_IMAGE: &str = "habitat/default-studio";
const DOCKER_WINDOWS_IMAGE: &str = "habitat/win-studio";
const DOCKER_IMAGE_ENVVAR: &str = "HAB_DOCKER_STUDIO_IMAGE";
const DOCKER_OPTS_ENVVAR: &str = "HAB_DOCKER_OPTS";
const DOCKER_SOCKET: &str = "/var/run/docker.sock";
const HAB_STUDIO_SECRET: &str = "HAB_STUDIO_SECRET_";
pub fn start_docker_studio(_ui: &mut UI, args: &[OsString]) -> Result<()> {
let mut args = args.to_vec();
if args.get(0) == Some(&OsString::from("rm")) {
return Err(Error::CannotRemoveDockerStudio);
}
let docker_cmd = docker::command_path()?;
let using_windows_containers = is_serving_windows_containers(&docker_cmd);
let image = image_identifier_for_active_target(using_windows_containers)?;
if is_image_present(&docker_cmd, &image) {
debug!("Found Studio Docker image locally.");
} else {
debug!("Failed to find Studio Docker image locally.");
pull_image(&docker_cmd, &image)?;
}
let mnt_prefix = if using_windows_containers { "c:" } else { "" };
let local_cache_key_path = match henv::var(CACHE_KEY_PATH_ENV_VAR) {
Ok(val) => PathBuf::from(val),
Err(_) => (&*CACHE_KEY_PATH).to_path_buf(),
};
if !local_cache_key_path.exists() {
return Err(Error::FileNotFound(format!("{}\nRun `hab setup` to \
create an origin or use \
`hab origin key` to \
configure your keys.",
local_cache_key_path.display())));
}
let mut volumes = vec![format!("{}:{}{}",
env::current_dir().unwrap().to_string_lossy(),
mnt_prefix,
"/src"),
format!("{}:{}/{}",
local_cache_key_path.display(),
mnt_prefix,
CACHE_KEY_PATH_POSTFIX),];
if let Ok(cache_artifact_path) = henv::var(ARTIFACT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}",
cache_artifact_path, mnt_prefix, CACHE_ARTIFACT_PATH));
}
if let Ok(cache_ssl_path) = henv::var(CERT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}", cache_ssl_path, mnt_prefix, CACHE_SSL_PATH));
}
if !using_windows_containers
&& (Path::new(DOCKER_SOCKET).exists() || cfg!(target_os = "windows"))
{
volumes.push(format!("{}:{}", DOCKER_SOCKET, DOCKER_SOCKET));
}
let mut env_vars = vec![String::from("DEBUG"),
String::from("DO_CHECK"),
String::from("HAB_AUTH_TOKEN"),
String::from("HAB_BLDR_URL"),
String::from("HAB_BLDR_CHANNEL"),
String::from("HAB_NOCOLORING"),
String::from("HAB_LICENSE"),
String::from("HAB_ORIGIN"),
String::from("HAB_ORIGIN_KEYS"),
String::from("HAB_STUDIO_BACKLINE_PKG"),
String::from("HAB_STUDIO_NOPROFILE"),
String::from("HAB_STUDIO_NOSTUDIORC"),
String::from("HAB_STUDIO_SUP"),
String::from("http_proxy"),
String::from("https_proxy"),
String::from("RUST_LOG"),
String::from("SSL_CERT_FILE"),];
for (key, _) in env::vars() {
if key.starts_with(HAB_STUDIO_SECRET) {
env_vars.push(key);
}
}
// We need to strip out the -D if it exists to avoid
// it getting passed to the sup on entering the studio
let to_cull = OsString::from("-D");
if let Some(index) = args.iter().position(|x| *x == to_cull) {
args.remove(index);
}
// When a user sets SSL_CERT_FILE, we need to modify the absolute
// path to the file to reflect the location of the file inside the studio
update_ssl_cert_file_envvar(mnt_prefix);
// Windows containers do not use filesystem sharing for
// local mounts
if !using_windows_containers {
check_mounts(&docker_cmd, volumes.iter(), &image)?;
}
run_container(docker_cmd,
&args,
volumes.iter(),
env_vars.iter(),
image,
using_windows_containers)
}
fn update_ssl_cert_file_envvar(mnt_prefix: &str) {
if let Ok(ssl_cert_file) = env::var(SSL_CERT_FILE_ENVVAR) {
if let Some(cert_file_name) = Path::new(&ssl_cert_file).file_name() {
if let Some(cert_file_name) = cert_file_name.to_str() {
// Don't use Path::join here in order to work around platform
// differences with paths on Windows with linux containers enabled
env::set_var(SSL_CERT_FILE_ENVVAR,
format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, cert_file_name));
} else {
warn!("Unable to format {:?} for use inside studio", ssl_cert_file);
}
} else {
warn!("Invalid SSL_CERT_FILE value: {:?}", ssl_cert_file);
}
}
}
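// Editor's note: hedged sketch, not part of the original source. The rewrite above keeps
// only the file name from SSL_CERT_FILE and re-roots it under the studio's SSL cache.
// Assuming CACHE_SSL_PATH resolves to "hab/cache/ssl", "/home/user/certs/ca.pem" with a
// mount prefix of "c:" becomes "c:/hab/cache/ssl/ca.pem". The helper name is invented.
fn sketch_rewrite_cert_path(mnt_prefix: &str, cache_ssl_path: &str, host_path: &str) -> Option<String> {
    std::path::Path::new(host_path)
        .file_name()
        .and_then(|name| name.to_str())
        .map(|name| format!("{}/{}/{}", mnt_prefix, cache_ssl_path, name))
}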
fn is_image_present(docker_cmd: &Path, image: &str) -> bool {
let mut cmd = Command::new(docker_cmd);
cmd.arg("images").arg(image).arg("-q");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
!String::from_utf8_lossy(&result.stdout).as_ref().is_empty()
}
fn is_serving_windows_containers(docker_cmd: &Path) -> bool {
let mut cmd = Command::new(docker_cmd);
cmd.arg("version").arg("--format='{{.Server.Os}}'");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
String::from_utf8_lossy(&result.stdout).contains("windows")
}
fn pull_image(docker_cmd: &Path, image: &str) -> Result<()> {
let mut cmd = Command::new(docker_cmd);
cmd.arg("pull")
.arg(image)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
debug!("Running command: {:?}", cmd);
let result = cmd.spawn()
.expect("Docker command failed to spawn")
.wait_with_output()
.expect("Failed to wait on child process");
if result.status.success() {
debug!("Docker image '{}' is present locally.", image);
} else {
debug!("Pulling Docker image '{}' failed with exit code: {:?}",
image, result.status);
let err_output = String::from_utf8_lossy(&result.stderr);
if err_output.contains("image") && err_output.contains("not found") {
return Err(Error::DockerImageNotFound(image.to_string()));
} else if err_output.contains("Cannot connect to the Docker daemon") {
return Err(Error::DockerDaemonDown);
} else {
return Err(Error::DockerNetworkDown(image.to_string()));
}
}
Ok(())
}
/// Checks whether or not the volume mounts are working.
///
/// We need to ensure that filesystem sharing has been enabled, otherwise the user will be
/// greeted with a horrible error message that's difficult to make sense of. To mitigate this,
/// we check the studio version. This will cause Docker to go through the mounting steps, so we
/// can watch stderr for failure, but has the advantage of not requiring a TTY.
fn check_mounts<I, S>(docker_cmd: &Path, volumes: I, image: &str) -> Result<()>
where I: IntoIterator<Item = S>,
S: AsRef<OsStr>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.push("-V".into());
let version_output = Command::new(docker_cmd).args(&cmd_args)
.output()
.expect("docker failed to start");
let stderr = String::from_utf8(version_output.stderr).unwrap();
if !stderr.is_empty()
&& (stderr.as_str().contains("Mounts denied")
|| stderr.as_str().contains("drive is not shared"))
{
return Err(Error::DockerFileSharingNotEnabled);
}
Ok(())
}
fn run_container<I, J, S, T>(docker_cmd: PathBuf,
args: &[OsString],
volumes: I,
env_vars: J,
image: String,
using_windows_containers: bool)
-> Result<()>
where I: IntoIterator<Item = S>,
J: IntoIterator<Item = T>,
S: AsRef<OsStr>,
T: AsRef<str>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
if !using_windows_containers {
cmd_args.push("--privileged".into());
}
if atty::is(atty::Stream::Stderr) || atty::is(atty::Stream::Stdout) {
cmd_args.push("--tty".into());
cmd_args.push("--interactive".into());
}
if let Ok(opts) = henv::var(DOCKER_OPTS_ENVVAR) {
let opts = opts
.split_whitespace()
.map(std::convert::Into::into)
// Ensure we're not passing something like `--tty` again here.
.filter(|v| !cmd_args.contains(v))
.collect::<Vec<_>>();
if !opts.is_empty() {
debug!("Adding extra Docker options from {} = {:?}",
DOCKER_OPTS_ENVVAR, opts);
cmd_args.extend_from_slice(opts.as_slice());
}
}
for var in env_vars {
if let Ok(val) = henv::var(var.as_ref()) {
debug!("Setting container env var: {:?}='{}'", var.as_ref(), val);
cmd_args.push("--env".into());
cmd_args.push(format!("{}={}", var.as_ref(), val).into());
} else if var.as_ref() == "HAB_LICENSE" && license::license_exists() {
debug!("Hab license already accepted. Setting container env var: \
HAB_LICENSE=accept-no-persist");
cmd_args.push("--env".into());
cmd_args.push("HAB_LICENSE=accept-no-persist".to_string().into());
}
}
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.extend_from_slice(args);
if using_windows_containers {
cmd_args.push("-n".into());
cmd_args.push("-o".into());
cmd_args.push("c:/".into());
}
unset_proxy_env_vars();
process::become_command(docker_cmd, &cmd_args)?;
Ok(())
}
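// Editor's note: hedged sketch, not part of the original source. HAB_DOCKER_OPTS is split
// on whitespace and any flag already present (for example `--tty`) is filtered out before
// being appended, as in run_container above. The helper name is invented for illustration.
fn sketch_merge_docker_opts(existing: &[String], raw_opts: &str) -> Vec<String> {
    raw_opts.split_whitespace()
            .map(|opt| opt.to_string())
            .filter(|opt| !existing.contains(opt))
            .collect()
}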
fn unset_proxy_env_vars() {
for var in &["http_proxy", "https_proxy"] {
if henv::var(var).is_ok() {
debug!("Unsetting process environment variable '{}'", var);
env::remove_var(var);
}
}
}
fn image_identifier_for_active_target(using_windows_containers: bool) -> Result<String> {
let windows_base_tag = if using_windows_containers {
Some(docker::default_base_tag_for_host()?)
} else {
None
};
Ok(image_identifier(windows_base_tag, target::PackageTarget::active_target()))
}
/// Returns the Docker Studio image with tag for the desired version which corresponds to the
/// same version (minus release) as this program.
fn image_identifier(windows_base_tag: Option<&str>, target: target::PackageTarget) -> String {
let version: Vec<&str> = VERSION.split('/').collect();
let (img, tag) = if let Some(t) = windows_base_tag {
(DOCKER_WINDOWS_IMAGE, format!("{}-{}", t, version[0]))
} else {
(DOCKER_IMAGE, version[0].to_string())
};
let studio_target = studio_target(windows_base_tag.is_some(), target);
henv::var(DOCKER_IMAGE_ENVVAR).unwrap_or_else(|_| format!("{}-{}:{}", img, studio_target, tag))
}
// Determine what studio target to use for a given target. This uses the various target feature
// flags to conditionally compile with the correct target symbols. Other parts of the code that
// change behavior based on the available targets should use a similar technique. The docker
// exporter code is one example. However, the docker exporter currently uses `#[cfg(unix)]` and
// `#[cfg(windows)]`. This should potentially be changed.
fn studio_target(windows: bool, target: target::PackageTarget) -> target::PackageTarget {
if windows {
#[cfg(feature = "supported_targets")]
return target::X86_64_WINDOWS;
}
match target {
#[cfg(feature = "supported_targets")]
target::X86_64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX_KERNEL2 => target::X86_64_LINUX_KERNEL2,
#[cfg(feature = "supported_targets")]
target::X86_64_WINDOWS => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::AARCH64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "aarch64-linux")]
target::AARCH64_LINUX => panic!("{} studios are not supported", target::AARCH64_LINUX),
// This is only needed for the case that we have no target enabled. In that case, we get a
// non-exhaustive patterns error because the match statement is empty.
#[cfg(not(any(feature = "supported_targets", feature = "aarch64-linux")))]
_ => unreachable!(),
}
}
#[cfg(test)]
mod tests {
use super::{image_identifier,
update_ssl_cert_file_envvar,
DOCKER_IMAGE,
DOCKER_WINDOWS_IMAGE};
use crate::VERSION;
|
package::target}};
habitat_core::locked_env_var!(SSL_CERT_FILE, lock_ssl_cert_file_env_var);
#[test]
#[cfg(feature = "supported_targets")]
fn retrieve_supported_image_identifier() {
assert_eq!(image_identifier(None, target::X86_64_DARWIN),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX_KERNEL2),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux-kernel2", VERSION));
assert_eq!(image_identifier(None, target::X86_64_WINDOWS),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_WINDOWS),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_LINUX),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
}
#[test]
#[should_panic]
#[cfg(feature = "aarch64-linux")]
fn retrieve_aarch64_image_identifier() { image_identifier(None, target::AARCH64_LINUX); }
#[test]
fn update_ssl_cert_file_envvar_not_set() {
let mnt_prefix = "";
let env_var = lock_ssl_cert_file_env_var();
env_var.unset();
update_ssl_cert_file_envvar(mnt_prefix);
assert!(std::env::var(SSL_CERT_FILE_ENVVAR).is_err());
}
#[test]
fn update_ssl_cert_file_envvar_has_value() {
let mnt_prefix = "/some/prefix";
let key_name = "ssl-test-cert.pem";
let env_var = lock_ssl_cert_file_env_var();
env_var.set(format!("/path/to/{}", key_name));
update_ssl_cert_file_envvar(mnt_prefix);
// Don't use Path::join here because we format! the path above,
// in order to work around platform differences with paths on
// windows with linux containers enabled
let internal_cert_path = format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, key_name);
assert_eq!(std::env::var(SSL_CERT_FILE_ENVVAR), Ok(internal_cert_path));
}
}
|
use crate::{command::studio::enter::SSL_CERT_FILE_ENVVAR,
hcore::{fs::CACHE_SSL_PATH,
|
random_line_split
|
docker.rs
|
use crate::{command::studio::enter::{ARTIFACT_PATH_ENVVAR,
CERT_PATH_ENVVAR,
SSL_CERT_FILE_ENVVAR},
common::ui::UI,
error::{Error,
Result},
hcore::{crypto::CACHE_KEY_PATH_ENV_VAR,
env as henv,
fs::{CACHE_ARTIFACT_PATH,
CACHE_KEY_PATH,
CACHE_KEY_PATH_POSTFIX,
CACHE_SSL_PATH},
os::process,
package::target,
util::docker},
license,
VERSION};
use std::{env,
ffi::{OsStr,
OsString},
path::{Path,
PathBuf},
process::{Command,
Stdio}};
const DOCKER_IMAGE: &str = "habitat/default-studio";
const DOCKER_WINDOWS_IMAGE: &str = "habitat/win-studio";
const DOCKER_IMAGE_ENVVAR: &str = "HAB_DOCKER_STUDIO_IMAGE";
const DOCKER_OPTS_ENVVAR: &str = "HAB_DOCKER_OPTS";
const DOCKER_SOCKET: &str = "/var/run/docker.sock";
const HAB_STUDIO_SECRET: &str = "HAB_STUDIO_SECRET_";
pub fn start_docker_studio(_ui: &mut UI, args: &[OsString]) -> Result<()> {
let mut args = args.to_vec();
if args.get(0) == Some(&OsString::from("rm")) {
return Err(Error::CannotRemoveDockerStudio);
}
let docker_cmd = docker::command_path()?;
let using_windows_containers = is_serving_windows_containers(&docker_cmd);
let image = image_identifier_for_active_target(using_windows_containers)?;
if is_image_present(&docker_cmd, &image) {
debug!("Found Studio Docker image locally.");
} else {
debug!("Failed to find Studio Docker image locally.");
pull_image(&docker_cmd, &image)?;
}
let mnt_prefix = if using_windows_containers { "c:" } else { "" };
let local_cache_key_path = match henv::var(CACHE_KEY_PATH_ENV_VAR) {
Ok(val) => PathBuf::from(val),
Err(_) => (&*CACHE_KEY_PATH).to_path_buf(),
};
if !local_cache_key_path.exists() {
return Err(Error::FileNotFound(format!("{}\nRun `hab setup` to \
create an origin or use \
`hab origin key` to \
configure your keys.",
local_cache_key_path.display())));
}
let mut volumes = vec![format!("{}:{}{}",
env::current_dir().unwrap().to_string_lossy(),
mnt_prefix,
"/src"),
format!("{}:{}/{}",
local_cache_key_path.display(),
mnt_prefix,
CACHE_KEY_PATH_POSTFIX),];
if let Ok(cache_artifact_path) = henv::var(ARTIFACT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}",
cache_artifact_path, mnt_prefix, CACHE_ARTIFACT_PATH));
}
if let Ok(cache_ssl_path) = henv::var(CERT_PATH_ENVVAR) {
// Don't use Path::join here as "\" can cause problems in Docker mounts
volumes.push(format!("{}:{}/{}", cache_ssl_path, mnt_prefix, CACHE_SSL_PATH));
}
if !using_windows_containers
&& (Path::new(DOCKER_SOCKET).exists() || cfg!(target_os = "windows"))
{
volumes.push(format!("{}:{}", DOCKER_SOCKET, DOCKER_SOCKET));
}
let mut env_vars = vec![String::from("DEBUG"),
String::from("DO_CHECK"),
String::from("HAB_AUTH_TOKEN"),
String::from("HAB_BLDR_URL"),
String::from("HAB_BLDR_CHANNEL"),
String::from("HAB_NOCOLORING"),
String::from("HAB_LICENSE"),
String::from("HAB_ORIGIN"),
String::from("HAB_ORIGIN_KEYS"),
String::from("HAB_STUDIO_BACKLINE_PKG"),
String::from("HAB_STUDIO_NOPROFILE"),
String::from("HAB_STUDIO_NOSTUDIORC"),
String::from("HAB_STUDIO_SUP"),
String::from("http_proxy"),
String::from("https_proxy"),
String::from("RUST_LOG"),
String::from("SSL_CERT_FILE"),];
for (key, _) in env::vars() {
if key.starts_with(HAB_STUDIO_SECRET) {
env_vars.push(key);
}
}
// We need to strip out the -D if it exists to avoid
// it getting passed to the sup on entering the studio
let to_cull = OsString::from("-D");
if let Some(index) = args.iter().position(|x| *x == to_cull) {
args.remove(index);
}
// When a user sets SSL_CERT_FILE, we need to modify the absolute
// path to the file to reflect the location of the file inside the studio
update_ssl_cert_file_envvar(mnt_prefix);
// Windows containers do not use filesystem sharing for
// local mounts
if !using_windows_containers {
check_mounts(&docker_cmd, volumes.iter(), &image)?;
}
run_container(docker_cmd,
&args,
volumes.iter(),
env_vars.iter(),
image,
using_windows_containers)
}
fn update_ssl_cert_file_envvar(mnt_prefix: &str) {
if let Ok(ssl_cert_file) = env::var(SSL_CERT_FILE_ENVVAR) {
if let Some(cert_file_name) = Path::new(&ssl_cert_file).file_name() {
if let Some(cert_file_name) = cert_file_name.to_str() {
// Don't use Path::join here in order to work around platform
// differences with paths on Windows with linux containers enabled
env::set_var(SSL_CERT_FILE_ENVVAR,
format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, cert_file_name));
} else {
warn!("Unable to format {:?} for use inside studio", ssl_cert_file);
}
} else {
warn!("Invalid SSL_CERT_FILE value: {:?}", ssl_cert_file);
}
}
}
fn is_image_present(docker_cmd: &Path, image: &str) -> bool {
let mut cmd = Command::new(docker_cmd);
cmd.arg("images").arg(image).arg("-q");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
!String::from_utf8_lossy(&result.stdout).as_ref().is_empty()
}
fn is_serving_windows_containers(docker_cmd: &Path) -> bool
|
fn pull_image(docker_cmd: &Path, image: &str) -> Result<()> {
let mut cmd = Command::new(docker_cmd);
cmd.arg("pull")
.arg(image)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
debug!("Running command: {:?}", cmd);
let result = cmd.spawn()
.expect("Docker command failed to spawn")
.wait_with_output()
.expect("Failed to wait on child process");
if result.status.success() {
debug!("Docker image '{}' is present locally.", image);
} else {
debug!("Pulling Docker image '{}' failed with exit code: {:?}",
image, result.status);
let err_output = String::from_utf8_lossy(&result.stderr);
if err_output.contains("image") && err_output.contains("not found") {
return Err(Error::DockerImageNotFound(image.to_string()));
} else if err_output.contains("Cannot connect to the Docker daemon") {
return Err(Error::DockerDaemonDown);
} else {
return Err(Error::DockerNetworkDown(image.to_string()));
}
}
Ok(())
}
/// Checks whether or not the volume mounts are working.
///
/// We need to ensure that filesystem sharing has been enabled, otherwise the user will be
/// greeted with a horrible error message that's difficult to make sense of. To mitigate this,
/// we check the studio version. This will cause Docker to go through the mounting steps, so we
/// can watch stderr for failure, but has the advantage of not requiring a TTY.
fn check_mounts<I, S>(docker_cmd: &Path, volumes: I, image: &str) -> Result<()>
where I: IntoIterator<Item = S>,
S: AsRef<OsStr>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.push("-V".into());
let version_output = Command::new(docker_cmd).args(&cmd_args)
.output()
.expect("docker failed to start");
let stderr = String::from_utf8(version_output.stderr).unwrap();
if !stderr.is_empty()
&& (stderr.as_str().contains("Mounts denied")
|| stderr.as_str().contains("drive is not shared"))
{
return Err(Error::DockerFileSharingNotEnabled);
}
Ok(())
}
fn run_container<I, J, S, T>(docker_cmd: PathBuf,
args: &[OsString],
volumes: I,
env_vars: J,
image: String,
using_windows_containers: bool)
-> Result<()>
where I: IntoIterator<Item = S>,
J: IntoIterator<Item = T>,
S: AsRef<OsStr>,
T: AsRef<str>
{
let mut cmd_args: Vec<OsString> = vec!["run".into(), "--rm".into()];
if !using_windows_containers {
cmd_args.push("--privileged".into());
}
if atty::is(atty::Stream::Stderr) || atty::is(atty::Stream::Stdout) {
cmd_args.push("--tty".into());
cmd_args.push("--interactive".into());
}
if let Ok(opts) = henv::var(DOCKER_OPTS_ENVVAR) {
let opts = opts
.split_whitespace()
.map(std::convert::Into::into)
// Ensure we're not passing something like `--tty` again here.
.filter(|v| !cmd_args.contains(v))
.collect::<Vec<_>>();
if !opts.is_empty() {
debug!("Adding extra Docker options from {} = {:?}",
DOCKER_OPTS_ENVVAR, opts);
cmd_args.extend_from_slice(opts.as_slice());
}
}
for var in env_vars {
if let Ok(val) = henv::var(var.as_ref()) {
debug!("Setting container env var: {:?}='{}'", var.as_ref(), val);
cmd_args.push("--env".into());
cmd_args.push(format!("{}={}", var.as_ref(), val).into());
} else if var.as_ref() == "HAB_LICENSE" && license::license_exists() {
debug!("Hab license already accepted. Setting container env var: \
HAB_LICENSE=accept-no-persist");
cmd_args.push("--env".into());
cmd_args.push("HAB_LICENSE=accept-no-persist".to_string().into());
}
}
for vol in volumes {
cmd_args.push("--volume".into());
cmd_args.push(vol.as_ref().into());
}
cmd_args.push(image.into());
cmd_args.extend_from_slice(args);
if using_windows_containers {
cmd_args.push("-n".into());
cmd_args.push("-o".into());
cmd_args.push("c:/".into());
}
unset_proxy_env_vars();
process::become_command(docker_cmd, &cmd_args)?;
Ok(())
}
fn unset_proxy_env_vars() {
for var in &["http_proxy", "https_proxy"] {
if henv::var(var).is_ok() {
debug!("Unsetting process environment variable '{}'", var);
env::remove_var(var);
}
}
}
fn image_identifier_for_active_target(using_windows_containers: bool) -> Result<String> {
let windows_base_tag = if using_windows_containers {
Some(docker::default_base_tag_for_host()?)
} else {
None
};
Ok(image_identifier(windows_base_tag, target::PackageTarget::active_target()))
}
/// Returns the Docker Studio image with tag for the desired version which corresponds to the
/// same version (minus release) as this program.
fn image_identifier(windows_base_tag: Option<&str>, target: target::PackageTarget) -> String {
let version: Vec<&str> = VERSION.split('/').collect();
let (img, tag) = if let Some(t) = windows_base_tag {
(DOCKER_WINDOWS_IMAGE, format!("{}-{}", t, version[0]))
} else {
(DOCKER_IMAGE, version[0].to_string())
};
let studio_target = studio_target(windows_base_tag.is_some(), target);
henv::var(DOCKER_IMAGE_ENVVAR).unwrap_or_else(|_| format!("{}-{}:{}", img, studio_target, tag))
}
// Determine what studio target to use for a given target. This uses the various target feature
// flags to conditionally compile with the correct target symbols. Other parts of the code that
// change behavior based on the available targets should use a similar technique. The docker
// exporter code is one example. However, the docker exporter currently uses `#[cfg(unix)]` and
// `#[cfg(windows)]`. This should potentially be changed.
fn studio_target(windows: bool, target: target::PackageTarget) -> target::PackageTarget {
if windows {
#[cfg(feature = "supported_targets")]
return target::X86_64_WINDOWS;
}
match target {
#[cfg(feature = "supported_targets")]
target::X86_64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::X86_64_LINUX_KERNEL2 => target::X86_64_LINUX_KERNEL2,
#[cfg(feature = "supported_targets")]
target::X86_64_WINDOWS => target::X86_64_LINUX,
#[cfg(feature = "supported_targets")]
target::AARCH64_DARWIN => target::X86_64_LINUX,
#[cfg(feature = "aarch64-linux")]
target::AARCH64_LINUX => panic!("{} studios are not supported", target::AARCH64_LINUX),
// This is only needed for the case that we have no target enabled. In that case, we get a
// non-exhaustive patterns error because the match statement is empty.
#[cfg(not(any(feature = "supported_targets", feature = "aarch64-linux")))]
_ => unreachable!(),
}
}
#[cfg(test)]
mod tests {
use super::{image_identifier,
update_ssl_cert_file_envvar,
DOCKER_IMAGE,
DOCKER_WINDOWS_IMAGE};
use crate::VERSION;
use crate::{command::studio::enter::SSL_CERT_FILE_ENVVAR,
hcore::{fs::CACHE_SSL_PATH,
package::target}};
habitat_core::locked_env_var!(SSL_CERT_FILE, lock_ssl_cert_file_env_var);
#[test]
#[cfg(feature = "supported_targets")]
fn retrieve_supported_image_identifier() {
assert_eq!(image_identifier(None, target::X86_64_DARWIN),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(None, target::X86_64_LINUX_KERNEL2),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux-kernel2", VERSION));
assert_eq!(image_identifier(None, target::X86_64_WINDOWS),
format!("{}-{}:{}", DOCKER_IMAGE, "x86_64-linux", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_WINDOWS),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
assert_eq!(image_identifier(Some("ltsc2016"), target::X86_64_LINUX),
format!("{}-{}:{}-{}",
DOCKER_WINDOWS_IMAGE, "x86_64-windows", "ltsc2016", VERSION));
}
#[test]
#[should_panic]
#[cfg(feature = "aarch64-linux")]
fn retrieve_aarch64_image_identifier() { image_identifier(None, target::AARCH64_LINUX); }
#[test]
fn update_ssl_cert_file_envvar_not_set() {
let mnt_prefix = "";
let env_var = lock_ssl_cert_file_env_var();
env_var.unset();
update_ssl_cert_file_envvar(mnt_prefix);
assert!(std::env::var(SSL_CERT_FILE_ENVVAR).is_err());
}
#[test]
fn update_ssl_cert_file_envvar_has_value() {
let mnt_prefix = "/some/prefix";
let key_name = "ssl-test-cert.pem";
let env_var = lock_ssl_cert_file_env_var();
env_var.set(format!("/path/to/{}", key_name));
update_ssl_cert_file_envvar(mnt_prefix);
// Don't use Path::join here because we format! the path above,
// in order to work around platform differences with paths on
// windows with linux containers enabled
let internal_cert_path = format!("{}/{}/{}", mnt_prefix, CACHE_SSL_PATH, key_name);
assert_eq!(std::env::var(SSL_CERT_FILE_ENVVAR), Ok(internal_cert_path));
}
}
|
{
let mut cmd = Command::new(docker_cmd);
cmd.arg("version").arg("--format='{{.Server.Os}}'");
debug!("Running command: {:?}", cmd);
let result = cmd.output().expect("Docker command failed to spawn");
String::from_utf8_lossy(&result.stdout).contains("windows")
}
|
identifier_body
|
extra.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Code for managing the layout data in the DOM.
use util::{PrivateLayoutData, LayoutDataAccess, LayoutDataWrapper};
use wrapper::LayoutNode;
use script::dom::node::SharedLayoutData;
use script::layout_interface::LayoutChan;
/// Functionality useful for querying the layout-specific data on DOM nodes.
pub trait LayoutAuxMethods {
fn initialize_layout_data(&self, chan: LayoutChan);
fn initialize_style_for_subtree(&self, chan: LayoutChan);
}
impl<'ln> LayoutAuxMethods for LayoutNode<'ln> {
/// Resets layout data and styles for the node.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn initialize_layout_data(&self, chan: LayoutChan) {
let mut layout_data_ref = self.mutate_layout_data();
match *layout_data_ref {
None => {
*layout_data_ref = Some(LayoutDataWrapper {
chan: Some(chan),
shared_data: SharedLayoutData { style: None },
data: box PrivateLayoutData::new(),
});
}
Some(_) => {}
}
}
/// Resets layout data and styles for a Node tree.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn initialize_style_for_subtree(&self, chan: LayoutChan) {
for n in self.traverse_preorder() {
n.initialize_layout_data(chan.clone());
}
}
|
}
|
random_line_split
|
|
extra.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Code for managing the layout data in the DOM.
use util::{PrivateLayoutData, LayoutDataAccess, LayoutDataWrapper};
use wrapper::LayoutNode;
use script::dom::node::SharedLayoutData;
use script::layout_interface::LayoutChan;
/// Functionality useful for querying the layout-specific data on DOM nodes.
pub trait LayoutAuxMethods {
fn initialize_layout_data(&self, chan: LayoutChan);
fn initialize_style_for_subtree(&self, chan: LayoutChan);
}
impl<'ln> LayoutAuxMethods for LayoutNode<'ln> {
/// Resets layout data and styles for the node.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn initialize_layout_data(&self, chan: LayoutChan) {
let mut layout_data_ref = self.mutate_layout_data();
match *layout_data_ref {
None => {
*layout_data_ref = Some(LayoutDataWrapper {
chan: Some(chan),
shared_data: SharedLayoutData { style: None },
data: box PrivateLayoutData::new(),
});
}
Some(_) => {}
}
}
/// Resets layout data and styles for a Node tree.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn
|
(&self, chan: LayoutChan) {
for n in self.traverse_preorder() {
n.initialize_layout_data(chan.clone());
}
}
}
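// Editor's note: hedged sketch, not part of the original source. initialize_layout_data
// above fills the slot only when it is still None; the same idea is expressed below with
// Option::get_or_insert_with, using a plain u32 stand-in for the layout data wrapper.
fn sketch_init_once(slot: &mut Option<u32>, make: impl FnOnce() -> u32) -> &mut u32 {
    slot.get_or_insert_with(make)
}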
|
initialize_style_for_subtree
|
identifier_name
|
extra.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Code for managing the layout data in the DOM.
use util::{PrivateLayoutData, LayoutDataAccess, LayoutDataWrapper};
use wrapper::LayoutNode;
use script::dom::node::SharedLayoutData;
use script::layout_interface::LayoutChan;
/// Functionality useful for querying the layout-specific data on DOM nodes.
pub trait LayoutAuxMethods {
fn initialize_layout_data(&self, chan: LayoutChan);
fn initialize_style_for_subtree(&self, chan: LayoutChan);
}
impl<'ln> LayoutAuxMethods for LayoutNode<'ln> {
/// Resets layout data and styles for the node.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn initialize_layout_data(&self, chan: LayoutChan)
|
/// Resets layout data and styles for a Node tree.
///
/// FIXME(pcwalton): Do this as part of fragment building instead of in a traversal.
fn initialize_style_for_subtree(&self, chan: LayoutChan) {
for n in self.traverse_preorder() {
n.initialize_layout_data(chan.clone());
}
}
}
|
{
let mut layout_data_ref = self.mutate_layout_data();
match *layout_data_ref {
None => {
*layout_data_ref = Some(LayoutDataWrapper {
chan: Some(chan),
shared_data: SharedLayoutData { style: None },
data: box PrivateLayoutData::new(),
});
}
Some(_) => {}
}
}
|
identifier_body
|
macros.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// See docs of the `procedural-masquerade` crate.
define_invoke_proc_macro!(cssparser_internal__invoke_proc_macro);
/// Expands to a `match` expression with string patterns,
/// matching case-insensitively in the ASCII range.
///
/// The patterns must not contain ASCII upper case letters. (They must already be lower-cased.)
///
/// # Example
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
/// # fn dummy(function_name: &String) { let _ =
/// match_ignore_ascii_case! { &function_name,
/// "rgb" => parse_rgb(..),
/// "rgba" => parse_rgba(..),
/// "hsl" => parse_hsl(..),
/// "hsla" => parse_hsla(..),
/// _ => Err(format!("unknown function: {}", function_name))
/// }
/// # ;}
/// # use std::ops::RangeFull;
/// # fn parse_rgb(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_rgba(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsl(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsla(_: RangeFull) -> Result<(), String> { Ok(()) }
/// ```
#[macro_export]
macro_rules! match_ignore_ascii_case {
( $input:expr, $( $match_body:tt )* ) => {
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__assert_ascii_lowercase__max_len!( $( $match_body )* )
}
{
// MAX_LENGTH is generated by cssparser_internal__assert_ascii_lowercase__max_len
cssparser_internal__to_lowercase!($input, MAX_LENGTH => lowercase);
// "A" is a short string that we know is different for every string pattern,
// since we’ve verified that none of them include ASCII upper case letters.
match lowercase.unwrap_or("A") {
$( $match_body )*
}
}
}
};
}
/// Define a function `$name(&str) -> Option<&'static $ValueType>`
///
/// The function finds a match for the input string
/// in a [`phf` map](https://github.com/sfackler/rust-phf)
/// and returns a reference to the corresponding value.
/// Matching is case-insensitive in the ASCII range.
///
/// ## Example:
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
///
/// fn color_rgb(input: &str) -> Option<(u8, u8, u8)> {
/// ascii_case_insensitive_phf_map! {
/// keyword -> (u8, u8, u8) = {
/// "red" => (255, 0, 0),
/// "green" => (0, 255, 0),
/// "blue" => (0, 0, 255),
/// }
/// }
/// keyword(input).cloned()
/// }
#[macro_export]
macro_rules! ascii_case_insensitive_phf_map {
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr ),* }) => {
ascii_case_insensitive_phf_map!($name -> $ValueType = { $( $key => $value, )* })
};
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr, )* }) => {
pub fn $name(input: &str) -> Option<&'static $ValueType> {
cssparser_internal__invoke_proc_macro! {
cssparser_internal__phf_map!( ($ValueType) $( $key ($value) )+ )
}
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__max_len!( $( $key )+ )
}
// MAX_LENGTH is generated by cssparser_internal__max_len
cssparser_internal__to_lowercase!(input, MAX_LENGTH => lowercase);
lowercase.and_then(|s| MAP.get(s))
}
}
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This macro is not part of the public API. It can change or be removed between any versions.**
///
/// Define a local variable named `$output`
/// and assign it the result of calling `_internal__to_lowercase`
/// with a stack-allocated buffer of length `$BUFFER_SIZE`.
#[macro_export]
#[doc(hidden)]
macro_rules! cssparser_internal__to_lowercase {
($input: expr, $BUFFER_SIZE: expr => $output: ident) => {
// mem::uninitialized() is ok because `buffer` is only used in `_internal__to_lowercase`,
// which initializes with `copy_from_slice` the part of the buffer it uses,
// before it uses it.
#[allow(unsafe_code)]
let mut buffer: [u8; $BUFFER_SIZE] = unsafe {
::std::mem::uninitialized()
};
let input: &str = $input;
let $output = $crate::_internal__to_lowercase(&mut buffer, input);
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This function is not part of the public API. It can change or be removed between any versions.**
///
/// If `input` is larger than buffer, return `None`.
/// Otherwise, return `input` ASCII-lowercased, using `buffer` as temporary space if necessary.
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn _i
|
a>(buffer: &'a mut [u8], input: &'a str) -> Option<&'a str> {
if let Some(buffer) = buffer.get_mut(..input.len()) {
if let Some(first_uppercase) = input.bytes().position(|byte| matches!(byte, b'A'...b'Z')) {
buffer.copy_from_slice(input.as_bytes());
::std::ascii::AsciiExt::make_ascii_lowercase(&mut buffer[first_uppercase..]);
// `buffer` was initialized to a copy of `input` (which is &str so well-formed UTF-8)
// then lowercased (which preserves UTF-8 well-formedness)
unsafe {
Some(::std::str::from_utf8_unchecked(buffer))
}
} else {
// Input is already lower-case
Some(input)
}
} else {
// Input is longer than buffer, which has the length of the longest expected string:
// none of the expected strings would match.
None
}
}
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}
|
nternal__to_lowercase<'
|
identifier_name
|
macros.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// See docs of the `procedural-masquerade` crate.
define_invoke_proc_macro!(cssparser_internal__invoke_proc_macro);
/// Expands to a `match` expression with string patterns,
/// matching case-insensitively in the ASCII range.
///
/// The patterns must not contain ASCII upper case letters. (They must already be lower-cased.)
///
/// # Example
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
/// # fn dummy(function_name: &String) { let _ =
/// match_ignore_ascii_case! { &function_name,
/// "rgb" => parse_rgb(..),
/// "rgba" => parse_rgba(..),
/// "hsl" => parse_hsl(..),
/// "hsla" => parse_hsla(..),
/// _ => Err(format!("unknown function: {}", function_name))
/// }
/// # ;}
/// # use std::ops::RangeFull;
/// # fn parse_rgb(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_rgba(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsl(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsla(_: RangeFull) -> Result<(), String> { Ok(()) }
/// ```
#[macro_export]
macro_rules! match_ignore_ascii_case {
( $input:expr, $( $match_body:tt )* ) => {
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__assert_ascii_lowercase__max_len!( $( $match_body )* )
}
{
// MAX_LENGTH is generated by cssparser_internal__assert_ascii_lowercase__max_len
cssparser_internal__to_lowercase!($input, MAX_LENGTH => lowercase);
// "A" is a short string that we know is different for every string pattern,
// since we’ve verified that none of them include ASCII upper case letters.
match lowercase.unwrap_or("A") {
$( $match_body )*
}
}
}
};
}
/// Define a function `$name(&str) -> Option<&'static $ValueType>`
///
/// The function finds a match for the input string
/// in a [`phf` map](https://github.com/sfackler/rust-phf)
/// and returns a reference to the corresponding value.
/// Matching is case-insensitive in the ASCII range.
///
/// ## Example:
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
///
/// fn color_rgb(input: &str) -> Option<(u8, u8, u8)> {
/// ascii_case_insensitive_phf_map! {
/// keyword -> (u8, u8, u8) = {
/// "red" => (255, 0, 0),
/// "green" => (0, 255, 0),
/// "blue" => (0, 0, 255),
/// }
/// }
/// keyword(input).cloned()
/// }
#[macro_export]
macro_rules! ascii_case_insensitive_phf_map {
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr ),* }) => {
ascii_case_insensitive_phf_map!($name -> $ValueType = { $( $key => $value, )* })
};
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr, )* }) => {
pub fn $name(input: &str) -> Option<&'static $ValueType> {
cssparser_internal__invoke_proc_macro! {
cssparser_internal__phf_map!( ($ValueType) $( $key ($value) )+ )
}
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__max_len!( $( $key )+ )
}
// MAX_LENGTH is generated by cssparser_internal__max_len
cssparser_internal__to_lowercase!(input, MAX_LENGTH => lowercase);
lowercase.and_then(|s| MAP.get(s))
}
}
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This macro is not part of the public API. It can change or be removed between any versions.**
///
/// Define a local variable named `$output`
/// and assign it the result of calling `_internal__to_lowercase`
/// with a stack-allocated buffer of length `$BUFFER_SIZE`.
#[macro_export]
#[doc(hidden)]
macro_rules! cssparser_internal__to_lowercase {
($input: expr, $BUFFER_SIZE: expr => $output: ident) => {
// mem::uninitialized() is ok because `buffer` is only used in `_internal__to_lowercase`,
// which initializes with `copy_from_slice` the part of the buffer it uses,
// before it uses it.
#[allow(unsafe_code)]
let mut buffer: [u8; $BUFFER_SIZE] = unsafe {
::std::mem::uninitialized()
};
let input: &str = $input;
let $output = $crate::_internal__to_lowercase(&mut buffer, input);
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This function is not part of the public API. It can change or be removed between any versions.**
///
/// If `input` is larger than buffer, return `None`.
/// Otherwise, return `input` ASCII-lowercased, using `buffer` as temporary space if necessary.
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn _internal__to_lowercase<'a>(buffer: &'a mut [u8], input: &'a str) -> Option<&'a str> {
|
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}
|
if let Some(buffer) = buffer.get_mut(..input.len()) {
if let Some(first_uppercase) = input.bytes().position(|byte| matches!(byte, b'A'...b'Z')) {
buffer.copy_from_slice(input.as_bytes());
::std::ascii::AsciiExt::make_ascii_lowercase(&mut buffer[first_uppercase..]);
// `buffer` was initialized to a copy of `input` (which is &str so well-formed UTF-8)
// then lowercased (which preserves UTF-8 well-formedness)
unsafe {
Some(::std::str::from_utf8_unchecked(buffer))
}
} else {
// Input is already lower-case
Some(input)
}
} else {
// Input is longer than buffer, which has the length of the longest expected string:
// none of the expected strings would match.
None
}
}
|
identifier_body
|
macros.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// See docs of the `procedural-masquerade` crate.
define_invoke_proc_macro!(cssparser_internal__invoke_proc_macro);
/// Expands to a `match` expression with string patterns,
/// matching case-insensitively in the ASCII range.
///
/// The patterns must not contain ASCII upper case letters. (They must already be lower-cased.)
///
/// # Example
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
/// # fn dummy(function_name: &String) { let _ =
/// match_ignore_ascii_case! { &function_name,
/// "rgb" => parse_rgb(..),
/// "rgba" => parse_rgba(..),
/// "hsl" => parse_hsl(..),
/// "hsla" => parse_hsla(..),
/// _ => Err(format!("unknown function: {}", function_name))
/// }
/// # ;}
/// # use std::ops::RangeFull;
/// # fn parse_rgb(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_rgba(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsl(_: RangeFull) -> Result<(), String> { Ok(()) }
|
( $input:expr, $( $match_body:tt )* ) => {
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__assert_ascii_lowercase__max_len!( $( $match_body )* )
}
{
// MAX_LENGTH is generated by cssparser_internal__assert_ascii_lowercase__max_len
cssparser_internal__to_lowercase!($input, MAX_LENGTH => lowercase);
// "A" is a short string that we know is different for every string pattern,
// since we’ve verified that none of them include ASCII upper case letters.
match lowercase.unwrap_or("A") {
$( $match_body )*
}
}
}
};
}
/// Define a function `$name(&str) -> Option<&'static $ValueType>`
///
/// The function finds a match for the input string
/// in a [`phf` map](https://github.com/sfackler/rust-phf)
/// and returns a reference to the corresponding value.
/// Matching is case-insensitive in the ASCII range.
///
/// ## Example:
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
///
/// fn color_rgb(input: &str) -> Option<(u8, u8, u8)> {
/// ascii_case_insensitive_phf_map! {
/// keyword -> (u8, u8, u8) = {
/// "red" => (255, 0, 0),
/// "green" => (0, 255, 0),
/// "blue" => (0, 0, 255),
/// }
/// }
/// keyword(input).cloned()
/// }
#[macro_export]
macro_rules! ascii_case_insensitive_phf_map {
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr ),* }) => {
ascii_case_insensitive_phf_map!($name -> $ValueType = { $( $key => $value, )* })
};
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr, )* }) => {
pub fn $name(input: &str) -> Option<&'static $ValueType> {
cssparser_internal__invoke_proc_macro! {
cssparser_internal__phf_map!( ($ValueType) $( $key ($value) )+ )
}
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__max_len!( $( $key )+ )
}
// MAX_LENGTH is generated by cssparser_internal__max_len
cssparser_internal__to_lowercase!(input, MAX_LENGTH => lowercase);
lowercase.and_then(|s| MAP.get(s))
}
}
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This macro is not part of the public API. It can change or be removed between any versions.**
///
/// Define a local variable named `$output`
/// and assign it the result of calling `_internal__to_lowercase`
/// with a stack-allocated buffer of length `$BUFFER_SIZE`.
#[macro_export]
#[doc(hidden)]
macro_rules! cssparser_internal__to_lowercase {
($input: expr, $BUFFER_SIZE: expr => $output: ident) => {
// mem::uninitialized() is ok because `buffer` is only used in `_internal__to_lowercase`,
// which initializes with `copy_from_slice` the part of the buffer it uses,
// before it uses it.
#[allow(unsafe_code)]
let mut buffer: [u8; $BUFFER_SIZE] = unsafe {
::std::mem::uninitialized()
};
let input: &str = $input;
let $output = $crate::_internal__to_lowercase(&mut buffer, input);
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This function is not part of the public API. It can change or be removed between any versions.**
///
/// If `input` is larger than buffer, return `None`.
/// Otherwise, return `input` ASCII-lowercased, using `buffer` as temporary space if necessary.
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn _internal__to_lowercase<'a>(buffer: &'a mut [u8], input: &'a str) -> Option<&'a str> {
if let Some(buffer) = buffer.get_mut(..input.len()) {
if let Some(first_uppercase) = input.bytes().position(|byte| matches!(byte, b'A'...b'Z')) {
buffer.copy_from_slice(input.as_bytes());
::std::ascii::AsciiExt::make_ascii_lowercase(&mut buffer[first_uppercase..]);
// `buffer` was initialized to a copy of `input` (which is &str so well-formed UTF-8)
// then lowercased (which preserves UTF-8 well-formedness)
unsafe {
Some(::std::str::from_utf8_unchecked(buffer))
}
} else {
// Input is already lower-case
Some(input)
}
} else {
// Input is longer than buffer, which has the length of the longest expected string:
// none of the expected strings would match.
None
}
}
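// Editor's note: hedged sketch, not part of the original source. The same buffer-backed
// ASCII lowercasing can be written without mem::uninitialized(), using a caller-provided
// scratch buffer; the function name below is invented for illustration.
fn sketch_ascii_lowercase<'a>(buffer: &'a mut [u8], input: &str) -> Option<&'a str> {
    let buf = buffer.get_mut(..input.len())?;
    buf.copy_from_slice(input.as_bytes());
    buf.make_ascii_lowercase();
    // ASCII lowercasing preserves UTF-8 well-formedness, so the checked conversion
    // cannot fail here, and it keeps the sketch free of unsafe code.
    std::str::from_utf8(buf).ok()
}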
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}
|
/// # fn parse_hsla(_: RangeFull) -> Result<(), String> { Ok(()) }
/// ```
#[macro_export]
macro_rules! match_ignore_ascii_case {
|
random_line_split
|
macros.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// See docs of the `procedural-masquerade` crate.
define_invoke_proc_macro!(cssparser_internal__invoke_proc_macro);
/// Expands to a `match` expression with string patterns,
/// matching case-insensitively in the ASCII range.
///
/// The patterns must not contain ASCII upper case letters. (They must already be lower-cased.)
///
/// # Example
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
/// # fn dummy(function_name: &String) { let _ =
/// match_ignore_ascii_case! { &function_name,
/// "rgb" => parse_rgb(..),
/// "rgba" => parse_rgba(..),
/// "hsl" => parse_hsl(..),
/// "hsla" => parse_hsla(..),
/// _ => Err(format!("unknown function: {}", function_name))
/// }
/// # ;}
/// # use std::ops::RangeFull;
/// # fn parse_rgb(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_rgba(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsl(_: RangeFull) -> Result<(), String> { Ok(()) }
/// # fn parse_hsla(_: RangeFull) -> Result<(), String> { Ok(()) }
/// ```
#[macro_export]
macro_rules! match_ignore_ascii_case {
( $input:expr, $( $match_body:tt )* ) => {
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__assert_ascii_lowercase__max_len!( $( $match_body )* )
}
{
// MAX_LENGTH is generated by cssparser_internal__assert_ascii_lowercase__max_len
cssparser_internal__to_lowercase!($input, MAX_LENGTH => lowercase);
// "A" is a short string that we know is different for every string pattern,
// since we’ve verified that none of them include ASCII upper case letters.
match lowercase.unwrap_or("A") {
$( $match_body )*
}
}
}
};
}
/// Define a function `$name(&str) -> Option<&'static $ValueType>`
///
/// The function finds a match for the input string
/// in a [`phf` map](https://github.com/sfackler/rust-phf)
/// and returns a reference to the corresponding value.
/// Matching is case-insensitive in the ASCII range.
///
/// ## Example:
///
/// ```rust
/// #[macro_use] extern crate cssparser;
///
/// # fn main() {} // Make doctest not wrap everything in its own main
///
/// fn color_rgb(input: &str) -> Option<(u8, u8, u8)> {
/// ascii_case_insensitive_phf_map! {
/// keyword -> (u8, u8, u8) = {
/// "red" => (255, 0, 0),
/// "green" => (0, 255, 0),
/// "blue" => (0, 0, 255),
/// }
/// }
/// keyword(input).cloned()
/// }
#[macro_export]
macro_rules! ascii_case_insensitive_phf_map {
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr ),* }) => {
ascii_case_insensitive_phf_map!($name -> $ValueType = { $( $key => $value, )* })
};
($name: ident -> $ValueType: ty = { $( $key: expr => $value: expr, )* }) => {
pub fn $name(input: &str) -> Option<&'static $ValueType> {
cssparser_internal__invoke_proc_macro! {
cssparser_internal__phf_map!( ($ValueType) $( $key ($value) )+ )
}
{
cssparser_internal__invoke_proc_macro! {
cssparser_internal__max_len!( $( $key )+ )
}
// MAX_LENGTH is generated by cssparser_internal__max_len
cssparser_internal__to_lowercase!(input, MAX_LENGTH => lowercase);
lowercase.and_then(|s| MAP.get(s))
}
}
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This macro is not part of the public API. It can change or be removed between any versions.**
///
/// Define a local variable named `$output`
/// and assign it the result of calling `_internal__to_lowercase`
/// with a stack-allocated buffer of length `$BUFFER_SIZE`.
#[macro_export]
#[doc(hidden)]
macro_rules! cssparser_internal__to_lowercase {
($input: expr, $BUFFER_SIZE: expr => $output: ident) => {
// mem::uninitialized() is ok because `buffer` is only used in `_internal__to_lowercase`,
// which initializes with `copy_from_slice` the part of the buffer it uses,
// before it uses it.
#[allow(unsafe_code)]
let mut buffer: [u8; $BUFFER_SIZE] = unsafe {
::std::mem::uninitialized()
};
let input: &str = $input;
let $output = $crate::_internal__to_lowercase(&mut buffer, input);
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This function is not part of the public API. It can change or be removed between any versions.**
///
/// If `input` is larger than buffer, return `None`.
/// Otherwise, return `input` ASCII-lowercased, using `buffer` as temporary space if necessary.
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn _internal__to_lowercase<'a>(buffer: &'a mut [u8], input: &'a str) -> Option<&'a str> {
if let Some(buffer) = buffer.get_mut(..input.len()) {
if let Some(first_uppercase) = input.bytes().position(|byte| matches!(byte, b'A'...b'Z')) {
|
lse {
// Input is already lower-case
Some(input)
}
} else {
// Input is longer than buffer, which has the length of the longest expected string:
// none of the expected strings would match.
None
}
}
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}
|
buffer.copy_from_slice(input.as_bytes());
::std::ascii::AsciiExt::make_ascii_lowercase(&mut buffer[first_uppercase..]);
// `buffer` was initialized to a copy of `input` (which is &str so well-formed UTF-8)
// then lowercased (which preserves UTF-8 well-formedness)
unsafe {
Some(::std::str::from_utf8_unchecked(buffer))
}
} e
|
conditional_block
|
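The macros.rs rows above all revolve around one trick: lower-casing the input into a fixed stack buffer sized to the longest expected keyword, so case-insensitive matching needs no heap allocation and over-long inputs are rejected immediately. The following is a minimal stand-alone sketch of that idea, not the cssparser macro itself; the keyword set and MAX_LEN are illustrative assumptions.

// Minimal sketch: case-insensitive keyword lookup using a stack buffer
// sized to the longest expected keyword (here 4 bytes, for "hsla"/"rgba").
fn lookup(input: &str) -> Option<&'static str> {
    const MAX_LEN: usize = 4;
    let mut buf = [0u8; MAX_LEN];
    // Inputs longer than any keyword cannot match, so bail out early.
    let slot = buf.get_mut(..input.len())?;
    slot.copy_from_slice(input.as_bytes());
    slot.make_ascii_lowercase();
    // The buffer holds a copy of a &str, and ASCII lower-casing preserves
    // UTF-8 well-formedness, so from_utf8 cannot fail here.
    let lower = std::str::from_utf8(slot).ok()?;
    match lower {
        "rgb" => Some("rgb"),
        "rgba" => Some("rgba"),
        "hsl" => Some("hsl"),
        "hsla" => Some("hsla"),
        _ => None,
    }
}

fn main() {
    assert_eq!(lookup("RGBA"), Some("rgba"));
    assert_eq!(lookup("magenta"), None); // longer than MAX_LEN: rejected early
}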
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"eden_config crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "";
if !options.is_empty()
|
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"eden_config.thrift"
])
.expect("Failed while running thrift compilation");
}
|
{
conf.options(options);
}
|
conditional_block
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main()
|
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "";
if !options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"eden_config.thrift"
])
.expect("Failed while running thrift compilation");
}
|
{
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"eden_config crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
|
identifier_body
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"eden_config crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "";
if !options.is_empty() {
|
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"eden_config.thrift"
])
.expect("Failed while running thrift compilation");
}
|
conf.options(options);
}
|
random_line_split
|
thrift_build.rs
|
// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn
|
() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"eden_config crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");
// TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block.
#[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "";
if !options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"eden_config.thrift"
])
.expect("Failed while running thrift compilation");
}
|
main
|
identifier_name
|
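The thrift_build.rs rows are all variants of one generated Cargo build script. Stripped of the Thrift-specific calls, the reusable pattern is: re-run on change, resolve OUT_DIR and CARGO_MANIFEST_DIR, and write generated artifacts under OUT_DIR. A hedged minimal sketch of that skeleton follows; the thrift_compiler invocation is omitted and the relative base path is only an example.

// build.rs sketch: the generic skeleton shared by the generated scripts above.
use std::env;
use std::fs;
use std::path::PathBuf;

fn main() {
    // Re-run the build script only when it changes.
    println!("cargo:rerun-if-changed=build.rs");

    // Cargo always provides these environment variables to build scripts.
    let out_dir = PathBuf::from(env::var_os("OUT_DIR").expect("OUT_DIR not set"));
    let manifest_dir =
        PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not set"));

    // Resolve paths relative to the crate root, then emit an artifact into OUT_DIR.
    let base = manifest_dir.join("../../..");
    fs::write(out_dir.join("cratemap"), "eden_config crate").expect("write cratemap");

    // Code generation (thrift_compiler::Config in the real script) would run here,
    // using `base` as its include root.
    let _ = base;
}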
issue-1049.rs
|
// Test overlong function signature
pub unsafe fn reborrow_mut(
&mut X: Abcde,
) -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
}
pub fn merge(
mut X: Abcdef,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
}
impl Handle {
pub fn merge(
a: Abcd,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>
|
}
// Long function without return type that should not be reformatted.
fn veeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
fn veeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {
}
fn veeeeeeeeeeeeeeeeeeeeeeery_long_name(
a: FirstTypeeeeeeeeee,
b: SecondTypeeeeeeeeeeeeeeeeeeeeeee,
) {
}
|
{
}
|
identifier_body
|
issue-1049.rs
|
// Test overlong function signature
pub unsafe fn reborrow_mut(
&mut X: Abcde,
) -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
}
pub fn
|
(
mut X: Abcdef,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
}
impl Handle {
pub fn merge(
a: Abcd,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
}
}
// Long function without return type that should not be reformatted.
fn veeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
fn veeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {
}
fn veeeeeeeeeeeeeeeeeeeeeeery_long_name(
a: FirstTypeeeeeeeeee,
b: SecondTypeeeeeeeeeeeeeeeeeeeeeee,
) {
}
|
merge
|
identifier_name
|
issue-1049.rs
|
// Test overlong function signature
pub unsafe fn reborrow_mut(
&mut X: Abcde,
) -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
}
|
impl Handle {
pub fn merge(
a: Abcd,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
}
}
// Long function without return type that should not be reformatted.
fn veeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
fn veeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {
}
fn veeeeeeeeeeeeeeeeeeeeeeery_long_name(
a: FirstTypeeeeeeeeee,
b: SecondTypeeeeeeeeeeeeeeeeeeeeeee,
) {
}
|
pub fn merge(
mut X: Abcdef,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
}
|
random_line_split
|
type_.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm;
use llvm::{TypeRef, Bool, False, True, TypeKind, ValueRef};
use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128};
use trans::context::CrateContext;
use util::nodemap::FnvHashMap;
use syntax::ast;
use std::ffi::CString;
use std::mem;
use std::ptr;
use std::cell::RefCell;
use libc::c_uint;
#[derive(Clone, Copy, PartialEq, Debug)]
#[repr(C)]
pub struct Type {
rf: TypeRef
}
macro_rules! ty {
($e:expr) => ( Type::from_ref(unsafe { $e }))
}
/// Wrapper for LLVM TypeRef
impl Type {
#[inline(always)]
pub fn from_ref(r: TypeRef) -> Type {
Type {
rf: r
}
}
#[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler
pub fn to_ref(&self) -> TypeRef {
self.rf
}
pub fn void(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMVoidTypeInContext(ccx.llcx()))
}
pub fn nil(ccx: &CrateContext) -> Type {
Type::empty_struct(ccx)
}
pub fn metadata(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMMetadataTypeInContext(ccx.llcx()))
}
pub fn i1(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt1TypeInContext(ccx.llcx()))
}
pub fn i8(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt8TypeInContext(ccx.llcx()))
}
pub fn i16(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt16TypeInContext(ccx.llcx()))
}
pub fn i32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt32TypeInContext(ccx.llcx()))
}
pub fn i64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt64TypeInContext(ccx.llcx()))
}
// Creates an integer type with the given number of bits, e.g. i24
pub fn ix(ccx: &CrateContext, num_bits: u64) -> Type {
ty!(llvm::LLVMIntTypeInContext(ccx.llcx(), num_bits as c_uint))
}
pub fn f32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMFloatTypeInContext(ccx.llcx()))
}
pub fn f64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMDoubleTypeInContext(ccx.llcx()))
}
pub fn bool(ccx: &CrateContext) -> Type {
Type::i8(ccx)
}
pub fn char(ccx: &CrateContext) -> Type {
Type::i32(ccx)
}
pub fn i8p(ccx: &CrateContext) -> Type {
Type::i8(ccx).ptr_to()
}
pub fn int(ccx: &CrateContext) -> Type {
match &ccx.tcx().sess.target.target.target_pointer_width[..] {
"32" => Type::i32(ccx),
"64" => Type::i64(ccx),
tws => panic!("Unsupported target word size for int: {}", tws),
}
}
pub fn int_from_ty(ccx: &CrateContext, t: ast::IntTy) -> Type {
match t {
ast::TyIs => ccx.int_type(),
ast::TyI8 => Type::i8(ccx),
ast::TyI16 => Type::i16(ccx),
ast::TyI32 => Type::i32(ccx),
ast::TyI64 => Type::i64(ccx)
}
}
pub fn uint_from_ty(ccx: &CrateContext, t: ast::UintTy) -> Type {
match t {
ast::TyUs => ccx.int_type(),
ast::TyU8 => Type::i8(ccx),
ast::TyU16 => Type::i16(ccx),
ast::TyU32 => Type::i32(ccx),
ast::TyU64 => Type::i64(ccx)
}
}
pub fn float_from_ty(ccx: &CrateContext, t: ast::FloatTy) -> Type {
match t {
ast::TyF32 => Type::f32(ccx),
ast::TyF64 => Type::f64(ccx),
}
}
pub fn func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
args.len() as c_uint, False))
}
pub fn variadic_func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
args.len() as c_uint, True))
}
pub fn struct_(ccx: &CrateContext, els: &[Type], packed: bool) -> Type {
let els : &[TypeRef] = unsafe { mem::transmute(els) };
ty!(llvm::LLVMStructTypeInContext(ccx.llcx(), els.as_ptr(),
els.len() as c_uint,
packed as Bool))
}
pub fn named_struct(ccx: &CrateContext, name: &str) -> Type {
let name = CString::new(name).unwrap();
ty!(llvm::LLVMStructCreateNamed(ccx.llcx(), name.as_ptr()))
}
pub fn empty_struct(ccx: &CrateContext) -> Type {
Type::struct_(ccx, &[], false)
}
pub fn glue_fn(ccx: &CrateContext, t: Type) -> Type {
Type::func(&[t], &Type::void(ccx))
}
pub fn array(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMRustArrayType(ty.to_ref(), len))
}
pub fn vector(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint))
}
pub fn vec(ccx: &CrateContext, ty: &Type) -> Type {
Type::struct_(ccx,
&[Type::array(ty, 0), Type::int(ccx)],
false)
}
pub fn opaque_vec(ccx: &CrateContext) -> Type {
Type::vec(ccx, &Type::i8(ccx))
}
pub fn vtable_ptr(ccx: &CrateContext) -> Type {
Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to().ptr_to()
}
pub fn kind(&self) -> TypeKind {
unsafe {
llvm::LLVMGetTypeKind(self.to_ref())
}
}
pub fn set_struct_body(&mut self, els: &[Type], packed: bool) {
unsafe {
let vec : &[TypeRef] = mem::transmute(els);
llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(),
els.len() as c_uint, packed as Bool)
}
}
pub fn ptr_to(&self) -> Type {
ty!(llvm::LLVMPointerType(self.to_ref(), 0))
}
pub fn is_aggregate(&self) -> bool {
match self.kind() {
TypeKind::Struct | TypeKind::Array => true,
_ => false
}
}
pub fn is_packed(&self) -> bool {
unsafe {
llvm::LLVMIsPackedStruct(self.to_ref()) == True
}
}
pub fn element_type(&self) -> Type {
unsafe {
Type::from_ref(llvm::LLVMGetElementType(self.to_ref()))
}
}
/// Return the number of elements in `self` if it is a LLVM vector type.
pub fn vector_length(&self) -> usize
|
pub fn array_length(&self) -> usize {
unsafe {
llvm::LLVMGetArrayLength(self.to_ref()) as usize
}
}
pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as usize;
if n_elts == 0 {
return Vec::new();
}
let mut elts = vec![Type { rf: ptr::null_mut() }; n_elts];
llvm::LLVMGetStructElementTypes(self.to_ref(),
elts.as_mut_ptr() as *mut TypeRef);
elts
}
}
pub fn return_type(&self) -> Type {
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}
pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as usize;
let mut args = vec![Type { rf: ptr::null_mut() }; n_args];
llvm::LLVMGetParamTypes(self.to_ref(),
args.as_mut_ptr() as *mut TypeRef);
args
}
}
pub fn float_width(&self) -> usize {
match self.kind() {
Float => 32,
Double => 64,
X86_FP80 => 80,
FP128 | PPC_FP128 => 128,
_ => panic!("llvm_float_width called on a non-float type")
}
}
/// Retrieve the bit width of the integer type `self`.
pub fn int_width(&self) -> u64 {
unsafe {
llvm::LLVMGetIntTypeWidth(self.to_ref()) as u64
}
}
}
/* Memory-managed object interface to type handles. */
pub struct TypeNames {
named_types: RefCell<FnvHashMap<String, TypeRef>>,
}
impl TypeNames {
pub fn new() -> TypeNames {
TypeNames {
named_types: RefCell::new(FnvHashMap())
}
}
pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_string(),
t.to_ref()).is_none());
}
pub fn find_type(&self, s: &str) -> Option<Type> {
self.named_types.borrow().get(s).map(|x| Type::from_ref(*x))
}
pub fn type_to_string(&self, ty: Type) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteTypeToString(ty.to_ref(), s);
}).expect("non-UTF8 type description from LLVM")
}
pub fn types_to_str(&self, tys: &[Type]) -> String {
let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
format!("[{}]", strs.connect(","))
}
pub fn val_to_string(&self, val: ValueRef) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteValueToString(val, s);
}).expect("nun-UTF8 value description from LLVM")
}
}
|
{
unsafe {
llvm::LLVMGetVectorSize(self.to_ref()) as usize
}
}
|
identifier_body
|
type_.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm;
use llvm::{TypeRef, Bool, False, True, TypeKind, ValueRef};
use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128};
use trans::context::CrateContext;
use util::nodemap::FnvHashMap;
use syntax::ast;
use std::ffi::CString;
use std::mem;
use std::ptr;
use std::cell::RefCell;
use libc::c_uint;
#[derive(Clone, Copy, PartialEq, Debug)]
#[repr(C)]
pub struct Type {
rf: TypeRef
}
macro_rules! ty {
($e:expr) => ( Type::from_ref(unsafe { $e }))
}
/// Wrapper for LLVM TypeRef
impl Type {
#[inline(always)]
pub fn from_ref(r: TypeRef) -> Type {
Type {
rf: r
}
}
#[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler
pub fn to_ref(&self) -> TypeRef {
self.rf
}
pub fn void(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMVoidTypeInContext(ccx.llcx()))
}
pub fn nil(ccx: &CrateContext) -> Type {
Type::empty_struct(ccx)
}
pub fn metadata(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMMetadataTypeInContext(ccx.llcx()))
}
pub fn i1(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt1TypeInContext(ccx.llcx()))
}
pub fn i8(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt8TypeInContext(ccx.llcx()))
}
pub fn i16(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt16TypeInContext(ccx.llcx()))
}
pub fn i32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt32TypeInContext(ccx.llcx()))
}
pub fn i64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt64TypeInContext(ccx.llcx()))
}
// Creates an integer type with the given number of bits, e.g. i24
pub fn ix(ccx: &CrateContext, num_bits: u64) -> Type {
ty!(llvm::LLVMIntTypeInContext(ccx.llcx(), num_bits as c_uint))
}
pub fn f32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMFloatTypeInContext(ccx.llcx()))
}
pub fn f64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMDoubleTypeInContext(ccx.llcx()))
}
pub fn bool(ccx: &CrateContext) -> Type {
Type::i8(ccx)
}
pub fn char(ccx: &CrateContext) -> Type {
Type::i32(ccx)
}
pub fn i8p(ccx: &CrateContext) -> Type {
Type::i8(ccx).ptr_to()
}
pub fn int(ccx: &CrateContext) -> Type {
match &ccx.tcx().sess.target.target.target_pointer_width[..] {
"32" => Type::i32(ccx),
"64" => Type::i64(ccx),
tws => panic!("Unsupported target word size for int: {}", tws),
}
}
pub fn int_from_ty(ccx: &CrateContext, t: ast::IntTy) -> Type {
match t {
ast::TyIs => ccx.int_type(),
ast::TyI8 => Type::i8(ccx),
ast::TyI16 => Type::i16(ccx),
ast::TyI32 => Type::i32(ccx),
ast::TyI64 => Type::i64(ccx)
}
}
pub fn uint_from_ty(ccx: &CrateContext, t: ast::UintTy) -> Type {
match t {
ast::TyUs => ccx.int_type(),
ast::TyU8 => Type::i8(ccx),
ast::TyU16 => Type::i16(ccx),
ast::TyU32 => Type::i32(ccx),
ast::TyU64 => Type::i64(ccx)
}
}
pub fn float_from_ty(ccx: &CrateContext, t: ast::FloatTy) -> Type {
match t {
ast::TyF32 => Type::f32(ccx),
ast::TyF64 => Type::f64(ccx),
}
}
|
args.len() as c_uint, False))
}
pub fn variadic_func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
args.len() as c_uint, True))
}
pub fn struct_(ccx: &CrateContext, els: &[Type], packed: bool) -> Type {
let els : &[TypeRef] = unsafe { mem::transmute(els) };
ty!(llvm::LLVMStructTypeInContext(ccx.llcx(), els.as_ptr(),
els.len() as c_uint,
packed as Bool))
}
pub fn named_struct(ccx: &CrateContext, name: &str) -> Type {
let name = CString::new(name).unwrap();
ty!(llvm::LLVMStructCreateNamed(ccx.llcx(), name.as_ptr()))
}
pub fn empty_struct(ccx: &CrateContext) -> Type {
Type::struct_(ccx, &[], false)
}
pub fn glue_fn(ccx: &CrateContext, t: Type) -> Type {
Type::func(&[t], &Type::void(ccx))
}
pub fn array(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMRustArrayType(ty.to_ref(), len))
}
pub fn vector(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint))
}
pub fn vec(ccx: &CrateContext, ty: &Type) -> Type {
Type::struct_(ccx,
&[Type::array(ty, 0), Type::int(ccx)],
false)
}
pub fn opaque_vec(ccx: &CrateContext) -> Type {
Type::vec(ccx, &Type::i8(ccx))
}
pub fn vtable_ptr(ccx: &CrateContext) -> Type {
Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to().ptr_to()
}
pub fn kind(&self) -> TypeKind {
unsafe {
llvm::LLVMGetTypeKind(self.to_ref())
}
}
pub fn set_struct_body(&mut self, els: &[Type], packed: bool) {
unsafe {
let vec : &[TypeRef] = mem::transmute(els);
llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(),
els.len() as c_uint, packed as Bool)
}
}
pub fn ptr_to(&self) -> Type {
ty!(llvm::LLVMPointerType(self.to_ref(), 0))
}
pub fn is_aggregate(&self) -> bool {
match self.kind() {
TypeKind::Struct | TypeKind::Array => true,
_ => false
}
}
pub fn is_packed(&self) -> bool {
unsafe {
llvm::LLVMIsPackedStruct(self.to_ref()) == True
}
}
pub fn element_type(&self) -> Type {
unsafe {
Type::from_ref(llvm::LLVMGetElementType(self.to_ref()))
}
}
/// Return the number of elements in `self` if it is a LLVM vector type.
pub fn vector_length(&self) -> usize {
unsafe {
llvm::LLVMGetVectorSize(self.to_ref()) as usize
}
}
pub fn array_length(&self) -> usize {
unsafe {
llvm::LLVMGetArrayLength(self.to_ref()) as usize
}
}
pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as usize;
if n_elts == 0 {
return Vec::new();
}
let mut elts = vec![Type { rf: ptr::null_mut() }; n_elts];
llvm::LLVMGetStructElementTypes(self.to_ref(),
elts.as_mut_ptr() as *mut TypeRef);
elts
}
}
pub fn return_type(&self) -> Type {
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}
pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as usize;
let mut args = vec![Type { rf: ptr::null_mut() }; n_args];
llvm::LLVMGetParamTypes(self.to_ref(),
args.as_mut_ptr() as *mut TypeRef);
args
}
}
pub fn float_width(&self) -> usize {
match self.kind() {
Float => 32,
Double => 64,
X86_FP80 => 80,
FP128 | PPC_FP128 => 128,
_ => panic!("llvm_float_width called on a non-float type")
}
}
/// Retrieve the bit width of the integer type `self`.
pub fn int_width(&self) -> u64 {
unsafe {
llvm::LLVMGetIntTypeWidth(self.to_ref()) as u64
}
}
}
/* Memory-managed object interface to type handles. */
pub struct TypeNames {
named_types: RefCell<FnvHashMap<String, TypeRef>>,
}
impl TypeNames {
pub fn new() -> TypeNames {
TypeNames {
named_types: RefCell::new(FnvHashMap())
}
}
pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_string(),
t.to_ref()).is_none());
}
pub fn find_type(&self, s: &str) -> Option<Type> {
self.named_types.borrow().get(s).map(|x| Type::from_ref(*x))
}
pub fn type_to_string(&self, ty: Type) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteTypeToString(ty.to_ref(), s);
}).expect("non-UTF8 type description from LLVM")
}
pub fn types_to_str(&self, tys: &[Type]) -> String {
let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
format!("[{}]", strs.connect(","))
}
pub fn val_to_string(&self, val: ValueRef) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteValueToString(val, s);
}).expect("nun-UTF8 value description from LLVM")
}
}
|
pub fn func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
|
random_line_split
|
type_.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_upper_case_globals)]
use llvm;
use llvm::{TypeRef, Bool, False, True, TypeKind, ValueRef};
use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128};
use trans::context::CrateContext;
use util::nodemap::FnvHashMap;
use syntax::ast;
use std::ffi::CString;
use std::mem;
use std::ptr;
use std::cell::RefCell;
use libc::c_uint;
#[derive(Clone, Copy, PartialEq, Debug)]
#[repr(C)]
pub struct Type {
rf: TypeRef
}
macro_rules! ty {
($e:expr) => ( Type::from_ref(unsafe { $e }))
}
/// Wrapper for LLVM TypeRef
impl Type {
#[inline(always)]
pub fn from_ref(r: TypeRef) -> Type {
Type {
rf: r
}
}
#[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler
pub fn to_ref(&self) -> TypeRef {
self.rf
}
pub fn void(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMVoidTypeInContext(ccx.llcx()))
}
pub fn nil(ccx: &CrateContext) -> Type {
Type::empty_struct(ccx)
}
pub fn metadata(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMMetadataTypeInContext(ccx.llcx()))
}
pub fn i1(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt1TypeInContext(ccx.llcx()))
}
pub fn i8(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt8TypeInContext(ccx.llcx()))
}
pub fn i16(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt16TypeInContext(ccx.llcx()))
}
pub fn i32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt32TypeInContext(ccx.llcx()))
}
pub fn i64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMInt64TypeInContext(ccx.llcx()))
}
// Creates an integer type with the given number of bits, e.g. i24
pub fn ix(ccx: &CrateContext, num_bits: u64) -> Type {
ty!(llvm::LLVMIntTypeInContext(ccx.llcx(), num_bits as c_uint))
}
pub fn f32(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMFloatTypeInContext(ccx.llcx()))
}
pub fn f64(ccx: &CrateContext) -> Type {
ty!(llvm::LLVMDoubleTypeInContext(ccx.llcx()))
}
pub fn bool(ccx: &CrateContext) -> Type {
Type::i8(ccx)
}
pub fn char(ccx: &CrateContext) -> Type {
Type::i32(ccx)
}
pub fn i8p(ccx: &CrateContext) -> Type {
Type::i8(ccx).ptr_to()
}
pub fn int(ccx: &CrateContext) -> Type {
match &ccx.tcx().sess.target.target.target_pointer_width[..] {
"32" => Type::i32(ccx),
"64" => Type::i64(ccx),
tws => panic!("Unsupported target word size for int: {}", tws),
}
}
pub fn int_from_ty(ccx: &CrateContext, t: ast::IntTy) -> Type {
match t {
ast::TyIs => ccx.int_type(),
ast::TyI8 => Type::i8(ccx),
ast::TyI16 => Type::i16(ccx),
ast::TyI32 => Type::i32(ccx),
ast::TyI64 => Type::i64(ccx)
}
}
pub fn uint_from_ty(ccx: &CrateContext, t: ast::UintTy) -> Type {
match t {
ast::TyUs => ccx.int_type(),
ast::TyU8 => Type::i8(ccx),
ast::TyU16 => Type::i16(ccx),
ast::TyU32 => Type::i32(ccx),
ast::TyU64 => Type::i64(ccx)
}
}
pub fn float_from_ty(ccx: &CrateContext, t: ast::FloatTy) -> Type {
match t {
ast::TyF32 => Type::f32(ccx),
ast::TyF64 => Type::f64(ccx),
}
}
pub fn func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
args.len() as c_uint, False))
}
pub fn variadic_func(args: &[Type], ret: &Type) -> Type {
let vec : &[TypeRef] = unsafe { mem::transmute(args) };
ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(),
args.len() as c_uint, True))
}
pub fn struct_(ccx: &CrateContext, els: &[Type], packed: bool) -> Type {
let els : &[TypeRef] = unsafe { mem::transmute(els) };
ty!(llvm::LLVMStructTypeInContext(ccx.llcx(), els.as_ptr(),
els.len() as c_uint,
packed as Bool))
}
pub fn named_struct(ccx: &CrateContext, name: &str) -> Type {
let name = CString::new(name).unwrap();
ty!(llvm::LLVMStructCreateNamed(ccx.llcx(), name.as_ptr()))
}
pub fn empty_struct(ccx: &CrateContext) -> Type {
Type::struct_(ccx, &[], false)
}
pub fn glue_fn(ccx: &CrateContext, t: Type) -> Type {
Type::func(&[t], &Type::void(ccx))
}
pub fn array(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMRustArrayType(ty.to_ref(), len))
}
pub fn vector(ty: &Type, len: u64) -> Type {
ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint))
}
pub fn vec(ccx: &CrateContext, ty: &Type) -> Type {
Type::struct_(ccx,
&[Type::array(ty, 0), Type::int(ccx)],
false)
}
pub fn opaque_vec(ccx: &CrateContext) -> Type {
Type::vec(ccx, &Type::i8(ccx))
}
pub fn vtable_ptr(ccx: &CrateContext) -> Type {
Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to().ptr_to()
}
pub fn kind(&self) -> TypeKind {
unsafe {
llvm::LLVMGetTypeKind(self.to_ref())
}
}
pub fn set_struct_body(&mut self, els: &[Type], packed: bool) {
unsafe {
let vec : &[TypeRef] = mem::transmute(els);
llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(),
els.len() as c_uint, packed as Bool)
}
}
pub fn ptr_to(&self) -> Type {
ty!(llvm::LLVMPointerType(self.to_ref(), 0))
}
pub fn is_aggregate(&self) -> bool {
match self.kind() {
TypeKind::Struct | TypeKind::Array => true,
_ => false
}
}
pub fn is_packed(&self) -> bool {
unsafe {
llvm::LLVMIsPackedStruct(self.to_ref()) == True
}
}
pub fn
|
(&self) -> Type {
unsafe {
Type::from_ref(llvm::LLVMGetElementType(self.to_ref()))
}
}
/// Return the number of elements in `self` if it is a LLVM vector type.
pub fn vector_length(&self) -> usize {
unsafe {
llvm::LLVMGetVectorSize(self.to_ref()) as usize
}
}
pub fn array_length(&self) -> usize {
unsafe {
llvm::LLVMGetArrayLength(self.to_ref()) as usize
}
}
pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as usize;
if n_elts == 0 {
return Vec::new();
}
let mut elts = vec![Type { rf: ptr::null_mut() }; n_elts];
llvm::LLVMGetStructElementTypes(self.to_ref(),
elts.as_mut_ptr() as *mut TypeRef);
elts
}
}
pub fn return_type(&self) -> Type {
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}
pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as usize;
let mut args = vec![Type { rf: ptr::null_mut() }; n_args];
llvm::LLVMGetParamTypes(self.to_ref(),
args.as_mut_ptr() as *mut TypeRef);
args
}
}
pub fn float_width(&self) -> usize {
match self.kind() {
Float => 32,
Double => 64,
X86_FP80 => 80,
FP128 | PPC_FP128 => 128,
_ => panic!("llvm_float_width called on a non-float type")
}
}
/// Retrieve the bit width of the integer type `self`.
pub fn int_width(&self) -> u64 {
unsafe {
llvm::LLVMGetIntTypeWidth(self.to_ref()) as u64
}
}
}
/* Memory-managed object interface to type handles. */
pub struct TypeNames {
named_types: RefCell<FnvHashMap<String, TypeRef>>,
}
impl TypeNames {
pub fn new() -> TypeNames {
TypeNames {
named_types: RefCell::new(FnvHashMap())
}
}
pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_string(),
t.to_ref()).is_none());
}
pub fn find_type(&self, s: &str) -> Option<Type> {
self.named_types.borrow().get(s).map(|x| Type::from_ref(*x))
}
pub fn type_to_string(&self, ty: Type) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteTypeToString(ty.to_ref(), s);
}).expect("non-UTF8 type description from LLVM")
}
pub fn types_to_str(&self, tys: &[Type]) -> String {
let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
format!("[{}]", strs.connect(","))
}
pub fn val_to_string(&self, val: ValueRef) -> String {
llvm::build_string(|s| unsafe {
llvm::LLVMWriteValueToString(val, s);
}).expect("nun-UTF8 value description from LLVM")
}
}
|
element_type
|
identifier_name
|
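The type_.rs rows wrap a raw LLVM TypeRef in a Copy newtype with from_ref/to_ref conversions, so the rest of the compiler never touches the raw pointer directly. Below is a minimal sketch of that newtype-over-raw-handle pattern using a made-up opaque pointer; none of the llvm:: calls are reproduced.

// Sketch of the newtype-over-raw-handle pattern used by `Type` above.
// `RawTypeRef` stands in for an opaque FFI pointer such as LLVM's TypeRef.
type RawTypeRef = *mut core::ffi::c_void;

#[derive(Clone, Copy, PartialEq, Debug)]
#[repr(transparent)]
struct Handle {
    raw: RawTypeRef,
}

impl Handle {
    #[inline(always)]
    fn from_raw(raw: RawTypeRef) -> Handle {
        Handle { raw }
    }

    #[inline(always)]
    fn to_raw(self) -> RawTypeRef {
        self.raw
    }
}

fn main() {
    // In real code `raw` would come from an FFI constructor such as an
    // LLVM type-creation function; a null pointer stands in here.
    let raw: RawTypeRef = std::ptr::null_mut();
    let h = Handle::from_raw(raw);
    assert_eq!(h.to_raw(), raw);
}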
method-on-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STACK BY REF
// gdb-command:print *self
// gdb-check:$1 = {x = 100}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:print self
// gdb-check:$4 = {x = 100}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:print *self
// gdb-check:$7 = {x = 200}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:print self
// gdb-check:$10 = {x = 200}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:print *self
// gdb-check:$13 = {x = 200}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct { x: 100 }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct { x: 100 }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct { x: 200 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct { x: 200 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
|
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
#[derive(Copy)]
struct Struct {
x: int
}
impl Struct {
fn self_by_ref(&self, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
fn self_by_val(self, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
fn self_owned(self: Box<Struct>, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
}
fn main() {
let stack = Struct { x: 100 };
let _ = stack.self_by_ref(-1, -2);
let _ = stack.self_by_val(-3, -4);
let owned = box Struct { x: 200 };
let _ = owned.self_by_ref(-5, -6);
let _ = owned.self_by_val(-7, -8);
let _ = owned.self_owned(-9, -10);
}
fn zzz() {()}
|
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct { x: 200 }
|
random_line_split
|
method-on-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STACK BY REF
// gdb-command:print *self
// gdb-check:$1 = {x = 100}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:print self
// gdb-check:$4 = {x = 100}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:print *self
// gdb-check:$7 = {x = 200}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:print self
// gdb-check:$10 = {x = 200}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:print *self
// gdb-check:$13 = {x = 200}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct { x: 100 }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct { x: 100 }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct { x: 200 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct { x: 200 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct { x: 200 }
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
#[derive(Copy)]
struct Struct {
x: int
}
impl Struct {
fn self_by_ref(&self, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
fn self_by_val(self, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
fn
|
(self: Box<Struct>, arg1: int, arg2: int) -> int {
zzz(); // #break
self.x + arg1 + arg2
}
}
fn main() {
let stack = Struct { x: 100 };
let _ = stack.self_by_ref(-1, -2);
let _ = stack.self_by_val(-3, -4);
let owned = box Struct { x: 200 };
let _ = owned.self_by_ref(-5, -6);
let _ = owned.self_by_val(-7, -8);
let _ = owned.self_owned(-9, -10);
}
fn zzz() {()}
|
self_owned
|
identifier_name
|
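The method-on-struct.rs rows are a debugger test, but the methods themselves exercise the three `self` receiver forms the debugger must handle: by reference, by value, and by owned Box. The same shape in current Rust looks like the sketch below, with the test's removed `int` type and `box` syntax replaced by `i64` and `Box::new`.

// The three receiver kinds exercised by the debuginfo test above,
// rewritten with current syntax.
struct Struct {
    x: i64,
}

impl Struct {
    fn self_by_ref(&self, arg1: i64, arg2: i64) -> i64 {
        self.x + arg1 + arg2
    }
    fn self_by_val(self, arg1: i64, arg2: i64) -> i64 {
        self.x + arg1 + arg2
    }
    fn self_owned(self: Box<Struct>, arg1: i64, arg2: i64) -> i64 {
        self.x + arg1 + arg2
    }
}

fn main() {
    let stack = Struct { x: 100 };
    assert_eq!(stack.self_by_ref(-1, -2), 97);
    assert_eq!(stack.self_by_val(-3, -4), 93);
    let owned = Box::new(Struct { x: 200 });
    assert_eq!(owned.self_owned(-9, -10), 181);
}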
atomic-access-bool.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(stable_features)]
#![feature(atomic_access)]
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
use std::sync::atomic::Ordering::*;
static mut ATOMIC: AtomicBool = ATOMIC_BOOL_INIT;
fn
|
() {
unsafe {
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.store(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_or(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_and(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.fetch_nand(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_xor(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
}
}
|
main
|
identifier_name
|
atomic-access-bool.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(stable_features)]
#![feature(atomic_access)]
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
use std::sync::atomic::Ordering::*;
static mut ATOMIC: AtomicBool = ATOMIC_BOOL_INIT;
fn main()
|
{
unsafe {
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.store(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_or(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_and(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.fetch_nand(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_xor(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
}
}
|
identifier_body
|
|
atomic-access-bool.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(stable_features)]
#![feature(atomic_access)]
|
static mut ATOMIC: AtomicBool = ATOMIC_BOOL_INIT;
fn main() {
unsafe {
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.store(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_or(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_and(false, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
ATOMIC.fetch_nand(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), true);
ATOMIC.fetch_xor(true, SeqCst);
assert_eq!(*ATOMIC.get_mut(), false);
}
}
|
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};
use std::sync::atomic::Ordering::*;
|
random_line_split
|
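The atomic-access-bool.rs rows predate stable const constructors, hence ATOMIC_BOOL_INIT and the `static mut`. The same sequence of fetch operations needs neither with the current standard library; the following is a hedged, modernized sketch rather than the original test.

use std::sync::atomic::{AtomicBool, Ordering::SeqCst};

// Same operation sequence as the test above, using AtomicBool::new
// instead of the long-deprecated ATOMIC_BOOL_INIT and `static mut`.
static ATOMIC: AtomicBool = AtomicBool::new(false);

fn main() {
    assert_eq!(ATOMIC.load(SeqCst), false);
    ATOMIC.store(true, SeqCst);
    assert_eq!(ATOMIC.load(SeqCst), true);
    ATOMIC.fetch_or(false, SeqCst);   // true | false == true
    assert_eq!(ATOMIC.load(SeqCst), true);
    ATOMIC.fetch_and(false, SeqCst);  // true & false == false
    assert_eq!(ATOMIC.load(SeqCst), false);
    ATOMIC.fetch_nand(true, SeqCst);  // !(false & true) == true
    assert_eq!(ATOMIC.load(SeqCst), true);
    ATOMIC.fetch_xor(true, SeqCst);   // true ^ true == false
    assert_eq!(ATOMIC.load(SeqCst), false);
}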
font.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="font" sub_properties="font-style font-variant font-weight
font-size line-height font-family"
spec="https://drafts.csswg.org/css-fonts-3/#propdef-font">
use properties::longhands::{font_style, font_variant, font_weight, font_size,
line_height, font_family};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut nb_normals = 0;
let mut style = None;
let mut variant = None;
let mut weight = None;
let size;
loop {
// Special-case 'normal' because it is valid in each of
// font-style, font-weight and font-variant.
// Leaves the values to None, 'normal' is the initial value for each of them.
if input.try(|input| input.expect_ident_matching("normal")).is_ok() {
nb_normals += 1;
continue;
}
if style.is_none() {
if let Ok(value) = input.try(|input| font_style::parse(context, input)) {
style = Some(value);
continue
}
}
if weight.is_none() {
if let Ok(value) = input.try(|input| font_weight::parse(context, input)) {
weight = Some(value);
continue
}
}
if variant.is_none() {
if let Ok(value) = input.try(|input| font_variant::parse(context, input)) {
variant = Some(value);
continue
}
}
size = Some(try!(font_size::parse(context, input)));
break
}
#[inline]
fn count<T>(opt: &Option<T>) -> u8 {
if opt.is_some() { 1 } else { 0 }
}
if size.is_none() || (count(&style) + count(&weight) + count(&variant) + nb_normals) > 3 {
return Err(())
}
let line_height = if input.try(|input| input.expect_delim('/')).is_ok() {
Some(try!(line_height::parse(context, input)))
} else {
None
};
let family = try!(input.parse_comma_separated(font_family::parse_one_family));
Ok(Longhands {
font_style: style,
font_variant: variant,
font_weight: weight,
font_size: size,
line_height: line_height,
font_family: Some(font_family::SpecifiedValue(family))
})
}
// This may be a bit off, unsure, possibly needs changes
impl<'a> LonghandsToSerialize<'a> {
fn to_css_declared<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write
|
_ => {
try!(write!(dest, "/"));
try!(height.to_css(dest));
}
}
}
try!(write!(dest, " "));
self.font_family.to_css(dest)
}
}
</%helpers:shorthand>
|
{
if let DeclaredValue::Value(ref style) = *self.font_style {
try!(style.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref variant) = *self.font_variant {
try!(variant.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref weight) = *self.font_weight {
try!(weight.to_css(dest));
try!(write!(dest, " "));
}
try!(self.font_size.to_css(dest));
if let DeclaredValue::Value(ref height) = *self.line_height {
match *height {
line_height::SpecifiedValue::Normal => {},
|
identifier_body
|
font.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="font" sub_properties="font-style font-variant font-weight
font-size line-height font-family"
spec="https://drafts.csswg.org/css-fonts-3/#propdef-font">
use properties::longhands::{font_style, font_variant, font_weight, font_size,
line_height, font_family};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut nb_normals = 0;
let mut style = None;
let mut variant = None;
let mut weight = None;
let size;
loop {
// Special-case 'normal' because it is valid in each of
// font-style, font-weight and font-variant.
// Leaves the values to None, 'normal' is the initial value for each of them.
if input.try(|input| input.expect_ident_matching("normal")).is_ok() {
nb_normals += 1;
continue;
}
if style.is_none() {
if let Ok(value) = input.try(|input| font_style::parse(context, input)) {
style = Some(value);
continue
}
}
if weight.is_none() {
if let Ok(value) = input.try(|input| font_weight::parse(context, input)) {
weight = Some(value);
continue
}
}
if variant.is_none() {
if let Ok(value) = input.try(|input| font_variant::parse(context, input)) {
variant = Some(value);
continue
}
}
size = Some(try!(font_size::parse(context, input)));
break
}
#[inline]
fn count<T>(opt: &Option<T>) -> u8 {
if opt.is_some() { 1 } else { 0 }
}
if size.is_none() || (count(&style) + count(&weight) + count(&variant) + nb_normals) > 3 {
return Err(())
}
let line_height = if input.try(|input| input.expect_delim('/')).is_ok() {
Some(try!(line_height::parse(context, input)))
} else {
None
};
let family = try!(input.parse_comma_separated(font_family::parse_one_family));
Ok(Longhands {
font_style: style,
font_variant: variant,
font_weight: weight,
font_size: size,
line_height: line_height,
font_family: Some(font_family::SpecifiedValue(family))
})
}
// This may be a bit off, unsure, possibly needs changes
impl<'a> LonghandsToSerialize<'a> {
fn
|
<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if let DeclaredValue::Value(ref style) = *self.font_style {
try!(style.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref variant) = *self.font_variant {
try!(variant.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref weight) = *self.font_weight {
try!(weight.to_css(dest));
try!(write!(dest, " "));
}
try!(self.font_size.to_css(dest));
if let DeclaredValue::Value(ref height) = *self.line_height {
match *height {
line_height::SpecifiedValue::Normal => {},
_ => {
try!(write!(dest, "/"));
try!(height.to_css(dest));
}
}
}
try!(write!(dest, " "));
self.font_family.to_css(dest)
}
}
</%helpers:shorthand>
|
to_css_declared
|
identifier_name
|
font.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="font" sub_properties="font-style font-variant font-weight
font-size line-height font-family"
spec="https://drafts.csswg.org/css-fonts-3/#propdef-font">
use properties::longhands::{font_style, font_variant, font_weight, font_size,
line_height, font_family};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut nb_normals = 0;
let mut style = None;
let mut variant = None;
let mut weight = None;
let size;
loop {
// Special-case 'normal' because it is valid in each of
// font-style, font-weight and font-variant.
// Leaves the values to None, 'normal' is the initial value for each of them.
if input.try(|input| input.expect_ident_matching("normal")).is_ok() {
nb_normals += 1;
continue;
}
if style.is_none() {
if let Ok(value) = input.try(|input| font_style::parse(context, input)) {
style = Some(value);
continue
}
}
if weight.is_none() {
if let Ok(value) = input.try(|input| font_weight::parse(context, input)) {
weight = Some(value);
continue
}
}
if variant.is_none() {
if let Ok(value) = input.try(|input| font_variant::parse(context, input))
|
}
size = Some(try!(font_size::parse(context, input)));
break
}
#[inline]
fn count<T>(opt: &Option<T>) -> u8 {
if opt.is_some() { 1 } else { 0 }
}
if size.is_none() || (count(&style) + count(&weight) + count(&variant) + nb_normals) > 3 {
return Err(())
}
let line_height = if input.try(|input| input.expect_delim('/')).is_ok() {
Some(try!(line_height::parse(context, input)))
} else {
None
};
let family = try!(input.parse_comma_separated(font_family::parse_one_family));
Ok(Longhands {
font_style: style,
font_variant: variant,
font_weight: weight,
font_size: size,
line_height: line_height,
font_family: Some(font_family::SpecifiedValue(family))
})
}
// This may be a bit off, unsure, possibly needs changes
impl<'a> LonghandsToSerialize<'a> {
fn to_css_declared<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if let DeclaredValue::Value(ref style) = *self.font_style {
try!(style.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref variant) = *self.font_variant {
try!(variant.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref weight) = *self.font_weight {
try!(weight.to_css(dest));
try!(write!(dest, " "));
}
try!(self.font_size.to_css(dest));
if let DeclaredValue::Value(ref height) = *self.line_height {
match *height {
line_height::SpecifiedValue::Normal => {},
_ => {
try!(write!(dest, "/"));
try!(height.to_css(dest));
}
}
}
try!(write!(dest, " "));
self.font_family.to_css(dest)
}
}
</%helpers:shorthand>
|
{
variant = Some(value);
continue
}
|
conditional_block
|
font.mako.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="font" sub_properties="font-style font-variant font-weight
font-size line-height font-family"
spec="https://drafts.csswg.org/css-fonts-3/#propdef-font">
use properties::longhands::{font_style, font_variant, font_weight, font_size,
line_height, font_family};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut nb_normals = 0;
let mut style = None;
let mut variant = None;
let mut weight = None;
let size;
loop {
// Special-case 'normal' because it is valid in each of
// font-style, font-weight and font-variant.
// Leaves the values to None, 'normal' is the initial value for each of them.
if input.try(|input| input.expect_ident_matching("normal")).is_ok() {
nb_normals += 1;
continue;
}
if style.is_none() {
if let Ok(value) = input.try(|input| font_style::parse(context, input)) {
style = Some(value);
continue
}
}
if weight.is_none() {
if let Ok(value) = input.try(|input| font_weight::parse(context, input)) {
weight = Some(value);
continue
}
}
if variant.is_none() {
if let Ok(value) = input.try(|input| font_variant::parse(context, input)) {
variant = Some(value);
continue
}
}
size = Some(try!(font_size::parse(context, input)));
break
}
#[inline]
fn count<T>(opt: &Option<T>) -> u8 {
if opt.is_some() { 1 } else { 0 }
}
if size.is_none() || (count(&style) + count(&weight) + count(&variant) + nb_normals) > 3 {
return Err(())
}
let line_height = if input.try(|input| input.expect_delim('/')).is_ok() {
Some(try!(line_height::parse(context, input)))
} else {
None
};
let family = try!(input.parse_comma_separated(font_family::parse_one_family));
Ok(Longhands {
font_style: style,
font_variant: variant,
font_weight: weight,
font_size: size,
line_height: line_height,
font_family: Some(font_family::SpecifiedValue(family))
})
}
// This may be a bit off, unsure, possibly needs changes
impl<'a> LonghandsToSerialize<'a> {
fn to_css_declared<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if let DeclaredValue::Value(ref style) = *self.font_style {
try!(style.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref variant) = *self.font_variant {
try!(variant.to_css(dest));
try!(write!(dest, " "));
}
if let DeclaredValue::Value(ref weight) = *self.font_weight {
try!(weight.to_css(dest));
try!(write!(dest, " "));
}
try!(self.font_size.to_css(dest));
if let DeclaredValue::Value(ref height) = *self.line_height {
match *height {
line_height::SpecifiedValue::Normal => {},
_ => {
try!(write!(dest, "/"));
try!(height.to_css(dest));
}
}
}
try!(write!(dest, " "));
self.font_family.to_css(dest)
}
}
|
</%helpers:shorthand>
|
random_line_split
|
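The font.mako.rs rows implement the classic shorthand-parsing loop: keep trying each optional longhand until the mandatory font-size appears, counting bare "normal" tokens because "normal" is valid for three different sub-properties. The sketch below reduces that control flow to plain string tokens; it illustrates the pattern only and is not the servo parser, its cssparser types, or its real value grammar.

// Shorthand-parse sketch: optional style/variant/weight in any order,
// then a mandatory size, mirroring the loop in the font shorthand above.
#[derive(Debug, Default)]
struct Font {
    style: Option<String>,
    variant: Option<String>,
    weight: Option<String>,
    size: String,
}

fn parse_font(tokens: &[&str]) -> Result<Font, ()> {
    let mut font = Font::default();
    let mut normals = 0u8;
    let mut it = tokens.iter();
    loop {
        let tok = *it.next().ok_or(())?;
        // "normal" is valid for style, variant and weight alike; just count it.
        if tok == "normal" {
            normals += 1;
            continue;
        }
        if font.style.is_none() && (tok == "italic" || tok == "oblique") {
            font.style = Some(tok.to_string());
            continue;
        }
        if font.weight.is_none() && (tok == "bold" || tok.chars().all(|c| c.is_ascii_digit())) {
            font.weight = Some(tok.to_string());
            continue;
        }
        if font.variant.is_none() && tok == "small-caps" {
            font.variant = Some(tok.to_string());
            continue;
        }
        // Anything else must be the mandatory size; stop scanning optionals.
        font.size = tok.to_string();
        break;
    }
    // Only three optional slots exist, so more settings than that is an error.
    let count = |o: &Option<String>| o.is_some() as u8;
    if count(&font.style) + count(&font.weight) + count(&font.variant) + normals > 3 {
        return Err(());
    }
    Ok(font)
}

fn main() {
    let f = parse_font(&["normal", "bold", "12px"]).unwrap();
    assert_eq!(f.weight.as_deref(), Some("bold"));
    assert_eq!(f.size, "12px");
    assert!(parse_font(&["normal", "normal", "normal", "normal", "12px"]).is_err());
}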