file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
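Each row pairs a Rust source file with a prefix/suffix/middle split for fill-in-the-middle (FIM) training; `fim_type` records how the masked span was chosen (`conditional_block`, `random_line_split`, `identifier_name`, `identifier_body`). As a rough illustration only (the sentinel token names below are placeholders, not part of this dataset), a row could be assembled into a FIM training string roughly like this:

```rust
// Minimal sketch: combine one row's prefix, suffix, and middle into a training string.
// The <fim_*> sentinels are assumed placeholder names; real tokenizers define their own.
fn build_fim_example(prefix: &str, suffix: &str, middle: &str) -> String {
    format!("<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>{middle}")
}

fn main() {
    let s = build_fim_example("fn add(a: i32, b: i32) -> i32 {\n    ", "\n}", "a + b");
    println!("{s}");
}
```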
main.rs
|
mod tokenizer;
mod executor;
use tokenizer::*;
use executor::{execute, Numeric};
use std::collections::HashMap;
use std::io::prelude::*;
use std::io;
fn main() {
// contain all program variables
let mut variables: HashMap<String, executor::Numeric> = HashMap::new();
// string to execute
let mut buffer = String::new();
loop {
print!(">> ");
io::stdout().flush()
.ok()
.expect( "[error] Can't flush to stdout!" );
io::stdin().read_line(&mut buffer)
.ok()
.expect( "[error] Can't read line from stdin!" );
// ignore null strings
if buffer.trim().len() == 0 {
continue;
}
// split string to tokens
let data = tokenizer::tokenize(buffer.trim());
// execute operation (check by exit flag)
if execute(&mut variables, &data)
|
// clean string
buffer.clear();
}
}
|
{
break;
}
|
conditional_block
|
needless_borrow.rs
|
// run-rustfix
|
*y
}
#[warn(clippy::all, clippy::needless_borrow)]
#[allow(unused_variables)]
fn main() {
let a = 5;
let b = x(&a);
let c = x(&&a);
let s = &String::from("hi");
let s_ident = f(&s); // should not error, because `&String` implements Copy, but `String` does not
let g_val = g(&Vec::new()); // should not error, because `&Vec<T>` derefs to `&[T]`
let vec = Vec::new();
let vec_val = g(&vec); // should not error, because `&Vec<T>` derefs to `&[T]`
h(&"foo"); // should not error, because the `&&str` is required, due to `&Trait`
let garbl = match 42 {
44 => &a,
45 => {
println!("foo");
&&a // FIXME: this should lint, too
},
46 => &&a,
_ => panic!(),
};
}
fn f<T: Copy>(y: &T) -> T {
*y
}
fn g(y: &[u8]) -> u8 {
y[0]
}
trait Trait {}
impl<'a> Trait for &'a str {}
fn h(_: &dyn Trait) {}
|
#![allow(clippy::needless_borrowed_reference)]
fn x(y: &i32) -> i32 {
|
random_line_split
|
needless_borrow.rs
|
// run-rustfix
#![allow(clippy::needless_borrowed_reference)]
fn x(y: &i32) -> i32 {
*y
}
#[warn(clippy::all, clippy::needless_borrow)]
#[allow(unused_variables)]
fn main() {
let a = 5;
let b = x(&a);
let c = x(&&a);
let s = &String::from("hi");
let s_ident = f(&s); // should not error, because `&String` implements Copy, but `String` does not
let g_val = g(&Vec::new()); // should not error, because `&Vec<T>` derefs to `&[T]`
let vec = Vec::new();
let vec_val = g(&vec); // should not error, because `&Vec<T>` derefs to `&[T]`
h(&"foo"); // should not error, because the `&&str` is required, due to `&Trait`
let garbl = match 42 {
44 => &a,
45 => {
println!("foo");
&&a // FIXME: this should lint, too
},
46 => &&a,
_ => panic!(),
};
}
fn f<T: Copy>(y: &T) -> T {
*y
}
fn
|
(y: &[u8]) -> u8 {
y[0]
}
trait Trait {}
impl<'a> Trait for &'a str {}
fn h(_: &dyn Trait) {}
|
g
|
identifier_name
|
needless_borrow.rs
|
// run-rustfix
#![allow(clippy::needless_borrowed_reference)]
fn x(y: &i32) -> i32 {
*y
}
#[warn(clippy::all, clippy::needless_borrow)]
#[allow(unused_variables)]
fn main()
|
fn f<T: Copy>(y: &T) -> T {
*y
}
fn g(y: &[u8]) -> u8 {
y[0]
}
trait Trait {}
impl<'a> Trait for &'a str {}
fn h(_: &dyn Trait) {}
|
{
let a = 5;
let b = x(&a);
let c = x(&&a);
let s = &String::from("hi");
let s_ident = f(&s); // should not error, because `&String` implements Copy, but `String` does not
let g_val = g(&Vec::new()); // should not error, because `&Vec<T>` derefs to `&[T]`
let vec = Vec::new();
let vec_val = g(&vec); // should not error, because `&Vec<T>` derefs to `&[T]`
h(&"foo"); // should not error, because the `&&str` is required, due to `&Trait`
let garbl = match 42 {
44 => &a,
45 => {
println!("foo");
&&a // FIXME: this should lint, too
},
46 => &&a,
_ => panic!(),
};
}
|
identifier_body
|
graph.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
A graph module for use in dataflow, region resolution, and elsewhere.
# Interface details
You customize the graph by specifying a "node data" type `N` and an
"edge data" type `E`. You can then later gain access (mutable or
immutable) to these "user-data" bits. Currently, you can only add
nodes or edges to the graph. You cannot remove or modify them once
added. This could be changed if we have a need.
# Implementation details
The main tricky thing about this code is the way that edges are
stored. The edges are stored in a central array, but they are also
threaded onto two linked lists for each node, one for incoming edges
and one for outgoing edges. Note that every edge is a member of some
incoming list and some outgoing list. Basically you can load the
first index of the linked list from the node data structures (the
field `first_edge`) and then, for each edge, load the next index from
the field `next_edge`. Each of those fields is an array that should
be indexed by the direction (see the type `Direction`).
*/
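// Illustrative sketch (not part of the original file): given the scheme described
// above, walking all outgoing edges of a node `n` by hand would look roughly like
// this, assuming `g: Graph<N, E>`:
//
//     let mut e = g.first_adjacent(n, Outgoing);
//     while e != InvalidEdgeIndex {
//         // ... inspect g.edge(e) ...
//         e = g.next_adjacent(e, Outgoing);
//     }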
use std::uint;
use std::vec;
pub struct Graph<N,E> {
priv nodes: ~[Node<N>],
priv edges: ~[Edge<E>],
}
pub struct Node<N> {
priv first_edge: [EdgeIndex, ..2], // see module comment
data: N,
}
pub struct Edge<E> {
priv next_edge: [EdgeIndex, ..2], // see module comment
priv source: NodeIndex,
priv target: NodeIndex,
data: E,
}
#[deriving(Eq)]
pub struct NodeIndex(uint);
pub static InvalidNodeIndex: NodeIndex = NodeIndex(uint::max_value);
#[deriving(Eq)]
pub struct EdgeIndex(uint);
pub static InvalidEdgeIndex: EdgeIndex = EdgeIndex(uint::max_value);
// Use a private field here to guarantee no more instances are created:
pub struct Direction { priv repr: uint }
pub static Outgoing: Direction = Direction { repr: 0 };
pub static Incoming: Direction = Direction { repr: 1 };
impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> {
Graph {nodes: ~[], edges: ~[]}
}
pub fn with_capacity(num_nodes: uint,
num_edges: uint) -> Graph<N,E> {
Graph {nodes: vec::with_capacity(num_nodes),
edges: vec::with_capacity(num_edges)}
}
///////////////////////////////////////////////////////////////////////////
// Simple accessors
#[inline]
pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] {
let nodes: &'a [Node<N>] = self.nodes;
nodes
}
#[inline]
pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] {
let edges: &'a [Edge<E>] = self.edges;
edges
}
///////////////////////////////////////////////////////////////////////////
// Node construction
pub fn next_node_index(&self) -> NodeIndex {
NodeIndex(self.nodes.len())
}
pub fn add_node(&mut self, data: N) -> NodeIndex {
let idx = self.next_node_index();
self.nodes.push(Node {
first_edge: [InvalidEdgeIndex, InvalidEdgeIndex],
data: data
});
idx
}
pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
&mut self.nodes[*idx].data
}
pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
&self.nodes[*idx].data
}
pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
&self.nodes[*idx]
}
///////////////////////////////////////////////////////////////////////////
// Edge construction and queries
pub fn next_edge_index(&self) -> EdgeIndex {
EdgeIndex(self.edges.len())
}
pub fn add_edge(&mut self,
source: NodeIndex,
target: NodeIndex,
data: E) -> EdgeIndex {
let idx = self.next_edge_index();
// read current first of the list of edges from each node
let source_first = self.nodes[*source].first_edge[Outgoing.repr];
let target_first = self.nodes[*target].first_edge[Incoming.repr];
// create the new edge, with the previous firsts from each node
// as the next pointers
self.edges.push(Edge {
next_edge: [source_first, target_first],
source: source,
target: target,
data: data
});
// adjust the firsts for each node to be the next object.
self.nodes[*source].first_edge[Outgoing.repr] = idx;
self.nodes[*target].first_edge[Incoming.repr] = idx;
return idx;
}
pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E {
&mut self.edges[*idx].data
}
pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
&self.edges[*idx].data
}
pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
&self.edges[*idx]
}
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the index of the first edge adjacent to `node`.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.nodes[*node].first_edge[dir.repr]
}
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the next edge in a given direction.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.edges[*edge].next_edge[dir.repr]
}
///////////////////////////////////////////////////////////////////////////
// Iterating over nodes, edges
pub fn each_node(&self, f: &fn(NodeIndex, &Node<N>) -> bool) -> bool {
//! Iterates over all nodes defined in the graph.
self.nodes.iter().enumerate().advance(|(i, node)| f(NodeIndex(i), node))
}
pub fn each_edge(&self, f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges defined in the graph
self.edges.iter().enumerate().advance(|(i, edge)| f(EdgeIndex(i), edge))
}
pub fn each_outgoing_edge(&self,
source: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all outgoing edges from the node `from`
self.each_adjacent_edge(source, Outgoing, f)
}
pub fn each_incoming_edge(&self,
target: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all incoming edges to the node `target`
self.each_adjacent_edge(target, Incoming, f)
}
pub fn each_adjacent_edge(&self,
node: NodeIndex,
dir: Direction,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges adjacent to the node `node`
//! in the direction `dir` (either `Outgoing` or `Incoming`)
let mut edge_idx = self.first_adjacent(node, dir);
while edge_idx != InvalidEdgeIndex {
let edge = &self.edges[*edge_idx];
if !f(edge_idx, edge) {
return false;
}
edge_idx = edge.next_edge[dir.repr];
}
return true;
}
///////////////////////////////////////////////////////////////////////////
// Fixed-point iteration
//
// A common use for graphs in our compiler is to perform
// fixed-point iteration. In this case, each edge represents a
// constraint, and the nodes themselves are associated with
// variables or other bitsets. This method facilitates such a
// computation.
pub fn iterate_until_fixed_point(&self,
op: &fn(iter_index: uint,
edge_index: EdgeIndex,
edge: &Edge<E>) -> bool) {
let mut iteration = 0;
let mut changed = true;
while changed {
changed = false;
iteration += 1;
for (i, edge) in self.edges.iter().enumerate() {
changed |= op(iteration, EdgeIndex(i), edge);
}
}
}
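// Illustrative sketch (not part of the original file): a typical fixed-point use
// is propagating a per-node bit along edges until nothing changes, e.g. with
// `reach: &mut [bool]` indexed by node:
//
//     graph.iterate_until_fixed_point(|_iter, _idx, edge| {
//         let (s, t) = (*edge.source(), *edge.target());
//         if reach[s] && !reach[t] { reach[t] = true; true } else { false }
//     });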
}
pub fn each_edge_index(max_edge_index: EdgeIndex, f: &fn(EdgeIndex) -> bool) {
let mut i = 0;
let n = *max_edge_index;
while i < n {
if !f(EdgeIndex(i)) {
return;
}
i += 1;
}
}
impl<E> Edge<E> {
pub fn source(&self) -> NodeIndex {
self.source
}
pub fn target(&self) -> NodeIndex {
self.target
}
}
#[cfg(test)]
mod test {
use middle::graph::*;
type TestNode = Node<&'static str>;
type TestEdge = Edge<&'static str>;
type TestGraph = Graph<&'static str, &'static str>;
fn create_graph() -> TestGraph {
let mut graph = Graph::new();
// Create a simple graph
//
// A -+> B --> C
// | | ^
// | v |
// F D --> E
let a = graph.add_node("A");
let b = graph.add_node("B");
let c = graph.add_node("C");
let d = graph.add_node("D");
let e = graph.add_node("E");
let f = graph.add_node("F");
graph.add_edge(a, b, "AB");
graph.add_edge(b, c, "BC");
graph.add_edge(b, d, "BD");
graph.add_edge(d, e, "DE");
graph.add_edge(e, c, "EC");
graph.add_edge(f, b, "FB");
return graph;
}
#[test]
fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
do graph.each_node |idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
true
};
}
#[test]
fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
do graph.each_edge |idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
true
};
}
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
start_index: NodeIndex,
start_data: N,
expected_incoming: &[(E,N)],
expected_outgoing: &[(E,N)]) {
assert_eq!(graph.node_data(start_index), &start_data);
let mut counter = 0;
|
assert!(counter < expected_incoming.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_incoming[counter], edge_index, edge);
match expected_incoming[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(n, graph.node_data(edge.source));
assert_eq!(start_index, edge.target);
}
}
counter += 1;
true
};
assert_eq!(counter, expected_incoming.len());
let mut counter = 0;
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_outgoing[counter], edge_index, edge);
match expected_outgoing[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(start_index, edge.source);
assert_eq!(n, graph.node_data(edge.target));
}
}
counter += 1;
true
};
assert_eq!(counter, expected_outgoing.len());
}
#[test]
fn each_adjacent_from_a() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(0), "A",
[],
[("AB", "B")]);
}
#[test]
fn each_adjacent_from_b() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(1), "B",
[("FB", "F"), ("AB", "A"),],
[("BD", "D"), ("BC", "C"),]);
}
#[test]
fn each_adjacent_from_c() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(2), "C",
[("EC", "E"), ("BC", "B")],
[]);
}
#[test]
fn each_adjacent_from_d() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(3), "D",
[("BD", "B")],
[("DE", "E")]);
}
}
|
do graph.each_incoming_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
|
random_line_split
|
graph.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
A graph module for use in dataflow, region resolution, and elsewhere.
# Interface details
You customize the graph by specifying a "node data" type `N` and an
"edge data" type `E`. You can then later gain access (mutable or
immutable) to these "user-data" bits. Currently, you can only add
nodes or edges to the graph. You cannot remove or modify them once
added. This could be changed if we have a need.
# Implementation details
The main tricky thing about this code is the way that edges are
stored. The edges are stored in a central array, but they are also
threaded onto two linked lists for each node, one for incoming edges
and one for outgoing edges. Note that every edge is a member of some
incoming list and some outgoing list. Basically you can load the
first index of the linked list from the node data structures (the
field `first_edge`) and then, for each edge, load the next index from
the field `next_edge`. Each of those fields is an array that should
be indexed by the direction (see the type `Direction`).
*/
use std::uint;
use std::vec;
pub struct Graph<N,E> {
priv nodes: ~[Node<N>],
priv edges: ~[Edge<E>],
}
pub struct Node<N> {
priv first_edge: [EdgeIndex, ..2], // see module comment
data: N,
}
pub struct Edge<E> {
priv next_edge: [EdgeIndex, ..2], // see module comment
priv source: NodeIndex,
priv target: NodeIndex,
data: E,
}
#[deriving(Eq)]
pub struct NodeIndex(uint);
pub static InvalidNodeIndex: NodeIndex = NodeIndex(uint::max_value);
#[deriving(Eq)]
pub struct EdgeIndex(uint);
pub static InvalidEdgeIndex: EdgeIndex = EdgeIndex(uint::max_value);
// Use a private field here to guarantee no more instances are created:
pub struct Direction { priv repr: uint }
pub static Outgoing: Direction = Direction { repr: 0 };
pub static Incoming: Direction = Direction { repr: 1 };
impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> {
Graph {nodes: ~[], edges: ~[]}
}
pub fn with_capacity(num_nodes: uint,
num_edges: uint) -> Graph<N,E> {
Graph {nodes: vec::with_capacity(num_nodes),
edges: vec::with_capacity(num_edges)}
}
///////////////////////////////////////////////////////////////////////////
// Simple accessors
#[inline]
pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] {
let nodes: &'a [Node<N>] = self.nodes;
nodes
}
#[inline]
pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] {
let edges: &'a [Edge<E>] = self.edges;
edges
}
///////////////////////////////////////////////////////////////////////////
// Node construction
pub fn next_node_index(&self) -> NodeIndex {
NodeIndex(self.nodes.len())
}
pub fn add_node(&mut self, data: N) -> NodeIndex {
let idx = self.next_node_index();
self.nodes.push(Node {
first_edge: [InvalidEdgeIndex, InvalidEdgeIndex],
data: data
});
idx
}
pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
&mut self.nodes[*idx].data
}
pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
&self.nodes[*idx].data
}
pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
&self.nodes[*idx]
}
///////////////////////////////////////////////////////////////////////////
// Edge construction and queries
pub fn next_edge_index(&self) -> EdgeIndex {
EdgeIndex(self.edges.len())
}
pub fn add_edge(&mut self,
source: NodeIndex,
target: NodeIndex,
data: E) -> EdgeIndex {
let idx = self.next_edge_index();
// read current first of the list of edges from each node
let source_first = self.nodes[*source].first_edge[Outgoing.repr];
let target_first = self.nodes[*target].first_edge[Incoming.repr];
// create the new edge, with the previous firsts from each node
// as the next pointers
self.edges.push(Edge {
next_edge: [source_first, target_first],
source: source,
target: target,
data: data
});
// adjust the firsts for each node to be the next object.
self.nodes[*source].first_edge[Outgoing.repr] = idx;
self.nodes[*target].first_edge[Incoming.repr] = idx;
return idx;
}
pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E {
&mut self.edges[*idx].data
}
pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
&self.edges[*idx].data
}
pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
&self.edges[*idx]
}
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the index of the first edge adjacent to `node`.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.nodes[*node].first_edge[dir.repr]
}
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the next edge in a given direction.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.edges[*edge].next_edge[dir.repr]
}
///////////////////////////////////////////////////////////////////////////
// Iterating over nodes, edges
pub fn each_node(&self, f: &fn(NodeIndex, &Node<N>) -> bool) -> bool {
//! Iterates over all nodes defined in the graph.
self.nodes.iter().enumerate().advance(|(i, node)| f(NodeIndex(i), node))
}
pub fn each_edge(&self, f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges defined in the graph
self.edges.iter().enumerate().advance(|(i, edge)| f(EdgeIndex(i), edge))
}
pub fn each_outgoing_edge(&self,
source: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all outgoing edges from the node `from`
self.each_adjacent_edge(source, Outgoing, f)
}
pub fn each_incoming_edge(&self,
target: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all incoming edges to the node `target`
self.each_adjacent_edge(target, Incoming, f)
}
pub fn each_adjacent_edge(&self,
node: NodeIndex,
dir: Direction,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges adjacent to the node `node`
//! in the direction `dir` (either `Outgoing` or `Incoming`)
let mut edge_idx = self.first_adjacent(node, dir);
while edge_idx != InvalidEdgeIndex {
let edge = &self.edges[*edge_idx];
if !f(edge_idx, edge) {
return false;
}
edge_idx = edge.next_edge[dir.repr];
}
return true;
}
///////////////////////////////////////////////////////////////////////////
// Fixed-point iteration
//
// A common use for graphs in our compiler is to perform
// fixed-point iteration. In this case, each edge represents a
// constraint, and the nodes themselves are associated with
// variables or other bitsets. This method facilitates such a
// computation.
pub fn iterate_until_fixed_point(&self,
op: &fn(iter_index: uint,
edge_index: EdgeIndex,
edge: &Edge<E>) -> bool) {
let mut iteration = 0;
let mut changed = true;
while changed {
changed = false;
iteration += 1;
for (i, edge) in self.edges.iter().enumerate() {
changed |= op(iteration, EdgeIndex(i), edge);
}
}
}
}
pub fn each_edge_index(max_edge_index: EdgeIndex, f: &fn(EdgeIndex) -> bool) {
let mut i = 0;
let n = *max_edge_index;
while i < n {
if !f(EdgeIndex(i)) {
return;
}
i += 1;
}
}
impl<E> Edge<E> {
pub fn
|
(&self) -> NodeIndex {
self.source
}
pub fn target(&self) -> NodeIndex {
self.target
}
}
#[cfg(test)]
mod test {
use middle::graph::*;
type TestNode = Node<&'static str>;
type TestEdge = Edge<&'static str>;
type TestGraph = Graph<&'static str, &'static str>;
fn create_graph() -> TestGraph {
let mut graph = Graph::new();
// Create a simple graph
//
// A -+> B --> C
// | | ^
// | v |
// F D --> E
let a = graph.add_node("A");
let b = graph.add_node("B");
let c = graph.add_node("C");
let d = graph.add_node("D");
let e = graph.add_node("E");
let f = graph.add_node("F");
graph.add_edge(a, b, "AB");
graph.add_edge(b, c, "BC");
graph.add_edge(b, d, "BD");
graph.add_edge(d, e, "DE");
graph.add_edge(e, c, "EC");
graph.add_edge(f, b, "FB");
return graph;
}
#[test]
fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
do graph.each_node |idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
true
};
}
#[test]
fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
do graph.each_edge |idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
true
};
}
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
start_index: NodeIndex,
start_data: N,
expected_incoming: &[(E,N)],
expected_outgoing: &[(E,N)]) {
assert_eq!(graph.node_data(start_index), &start_data);
let mut counter = 0;
do graph.each_incoming_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_incoming[counter], edge_index, edge);
match expected_incoming[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(n, graph.node_data(edge.source));
assert_eq!(start_index, edge.target);
}
}
counter += 1;
true
};
assert_eq!(counter, expected_incoming.len());
let mut counter = 0;
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_outgoing[counter], edge_index, edge);
match expected_outgoing[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(start_index, edge.source);
assert_eq!(n, graph.node_data(edge.target));
}
}
counter += 1;
true
};
assert_eq!(counter, expected_outgoing.len());
}
#[test]
fn each_adjacent_from_a() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(0), "A",
[],
[("AB", "B")]);
}
#[test]
fn each_adjacent_from_b() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(1), "B",
[("FB", "F"), ("AB", "A"),],
[("BD", "D"), ("BC", "C"),]);
}
#[test]
fn each_adjacent_from_c() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(2), "C",
[("EC", "E"), ("BC", "B")],
[]);
}
#[test]
fn each_adjacent_from_d() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(3), "D",
[("BD", "B")],
[("DE", "E")]);
}
}
|
source
|
identifier_name
|
graph.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
A graph module for use in dataflow, region resolution, and elsewhere.
# Interface details
You customize the graph by specifying a "node data" type `N` and an
"edge data" type `E`. You can then later gain access (mutable or
immutable) to these "user-data" bits. Currently, you can only add
nodes or edges to the graph. You cannot remove or modify them once
added. This could be changed if we have a need.
# Implementation details
The main tricky thing about this code is the way that edges are
stored. The edges are stored in a central array, but they are also
threaded onto two linked lists for each node, one for incoming edges
and one for outgoing edges. Note that every edge is a member of some
incoming list and some outgoing list. Basically you can load the
first index of the linked list from the node data structures (the
field `first_edge`) and then, for each edge, load the next index from
the field `next_edge`. Each of those fields is an array that should
be indexed by the direction (see the type `Direction`).
*/
use std::uint;
use std::vec;
pub struct Graph<N,E> {
priv nodes: ~[Node<N>],
priv edges: ~[Edge<E>],
}
pub struct Node<N> {
priv first_edge: [EdgeIndex, ..2], // see module comment
data: N,
}
pub struct Edge<E> {
priv next_edge: [EdgeIndex, ..2], // see module comment
priv source: NodeIndex,
priv target: NodeIndex,
data: E,
}
#[deriving(Eq)]
pub struct NodeIndex(uint);
pub static InvalidNodeIndex: NodeIndex = NodeIndex(uint::max_value);
#[deriving(Eq)]
pub struct EdgeIndex(uint);
pub static InvalidEdgeIndex: EdgeIndex = EdgeIndex(uint::max_value);
// Use a private field here to guarantee no more instances are created:
pub struct Direction { priv repr: uint }
pub static Outgoing: Direction = Direction { repr: 0 };
pub static Incoming: Direction = Direction { repr: 1 };
impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> {
Graph {nodes: ~[], edges: ~[]}
}
pub fn with_capacity(num_nodes: uint,
num_edges: uint) -> Graph<N,E> {
Graph {nodes: vec::with_capacity(num_nodes),
edges: vec::with_capacity(num_edges)}
}
///////////////////////////////////////////////////////////////////////////
// Simple accessors
#[inline]
pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] {
let nodes: &'a [Node<N>] = self.nodes;
nodes
}
#[inline]
pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] {
let edges: &'a [Edge<E>] = self.edges;
edges
}
///////////////////////////////////////////////////////////////////////////
// Node construction
pub fn next_node_index(&self) -> NodeIndex {
NodeIndex(self.nodes.len())
}
pub fn add_node(&mut self, data: N) -> NodeIndex {
let idx = self.next_node_index();
self.nodes.push(Node {
first_edge: [InvalidEdgeIndex, InvalidEdgeIndex],
data: data
});
idx
}
pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
&mut self.nodes[*idx].data
}
pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
&self.nodes[*idx].data
}
pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
&self.nodes[*idx]
}
///////////////////////////////////////////////////////////////////////////
// Edge construction and queries
pub fn next_edge_index(&self) -> EdgeIndex {
EdgeIndex(self.edges.len())
}
pub fn add_edge(&mut self,
source: NodeIndex,
target: NodeIndex,
data: E) -> EdgeIndex {
let idx = self.next_edge_index();
// read current first of the list of edges from each node
let source_first = self.nodes[*source].first_edge[Outgoing.repr];
let target_first = self.nodes[*target].first_edge[Incoming.repr];
// create the new edge, with the previous firsts from each node
// as the next pointers
self.edges.push(Edge {
next_edge: [source_first, target_first],
source: source,
target: target,
data: data
});
// adjust the firsts for each node to be the next object.
self.nodes[*source].first_edge[Outgoing.repr] = idx;
self.nodes[*target].first_edge[Incoming.repr] = idx;
return idx;
}
pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E
|
pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
&self.edges[*idx].data
}
pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
&self.edges[*idx]
}
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the index of the first edge adjacent to `node`.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.nodes[*node].first_edge[dir.repr]
}
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
//! Accesses the next edge in a given direction.
//! This is useful if you wish to modify the graph while walking
//! the linked list of edges.
self.edges[*edge].next_edge[dir.repr]
}
///////////////////////////////////////////////////////////////////////////
// Iterating over nodes, edges
pub fn each_node(&self, f: &fn(NodeIndex, &Node<N>) -> bool) -> bool {
//! Iterates over all nodes defined in the graph.
self.nodes.iter().enumerate().advance(|(i, node)| f(NodeIndex(i), node))
}
pub fn each_edge(&self, f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges defined in the graph
self.edges.iter().enumerate().advance(|(i, edge)| f(EdgeIndex(i), edge))
}
pub fn each_outgoing_edge(&self,
source: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all outgoing edges from the node `from`
self.each_adjacent_edge(source, Outgoing, f)
}
pub fn each_incoming_edge(&self,
target: NodeIndex,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all incoming edges to the node `target`
self.each_adjacent_edge(target, Incoming, f)
}
pub fn each_adjacent_edge(&self,
node: NodeIndex,
dir: Direction,
f: &fn(EdgeIndex, &Edge<E>) -> bool) -> bool {
//! Iterates over all edges adjacent to the node `node`
//! in the direction `dir` (either `Outgoing` or `Incoming`)
let mut edge_idx = self.first_adjacent(node, dir);
while edge_idx != InvalidEdgeIndex {
let edge = &self.edges[*edge_idx];
if !f(edge_idx, edge) {
return false;
}
edge_idx = edge.next_edge[dir.repr];
}
return true;
}
///////////////////////////////////////////////////////////////////////////
// Fixed-point iteration
//
// A common use for graphs in our compiler is to perform
// fixed-point iteration. In this case, each edge represents a
// constraint, and the nodes themselves are associated with
// variables or other bitsets. This method facilitates such a
// computation.
pub fn iterate_until_fixed_point(&self,
op: &fn(iter_index: uint,
edge_index: EdgeIndex,
edge: &Edge<E>) -> bool) {
let mut iteration = 0;
let mut changed = true;
while changed {
changed = false;
iteration += 1;
for (i, edge) in self.edges.iter().enumerate() {
changed |= op(iteration, EdgeIndex(i), edge);
}
}
}
}
pub fn each_edge_index(max_edge_index: EdgeIndex, f: &fn(EdgeIndex) -> bool) {
let mut i = 0;
let n = *max_edge_index;
while i < n {
if !f(EdgeIndex(i)) {
return;
}
i += 1;
}
}
impl<E> Edge<E> {
pub fn source(&self) -> NodeIndex {
self.source
}
pub fn target(&self) -> NodeIndex {
self.target
}
}
#[cfg(test)]
mod test {
use middle::graph::*;
type TestNode = Node<&'static str>;
type TestEdge = Edge<&'static str>;
type TestGraph = Graph<&'static str, &'static str>;
fn create_graph() -> TestGraph {
let mut graph = Graph::new();
// Create a simple graph
//
// A -+> B --> C
// | | ^
// | v |
// F D --> E
let a = graph.add_node("A");
let b = graph.add_node("B");
let c = graph.add_node("C");
let d = graph.add_node("D");
let e = graph.add_node("E");
let f = graph.add_node("F");
graph.add_edge(a, b, "AB");
graph.add_edge(b, c, "BC");
graph.add_edge(b, d, "BD");
graph.add_edge(d, e, "DE");
graph.add_edge(e, c, "EC");
graph.add_edge(f, b, "FB");
return graph;
}
#[test]
fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
do graph.each_node |idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
true
};
}
#[test]
fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
do graph.each_edge |idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
true
};
}
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
start_index: NodeIndex,
start_data: N,
expected_incoming: &[(E,N)],
expected_outgoing: &[(E,N)]) {
assert_eq!(graph.node_data(start_index), &start_data);
let mut counter = 0;
do graph.each_incoming_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_incoming[counter], edge_index, edge);
match expected_incoming[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(n, graph.node_data(edge.source));
assert_eq!(start_index, edge.target);
}
}
counter += 1;
true
};
assert_eq!(counter, expected_incoming.len());
let mut counter = 0;
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
counter, expected_outgoing[counter], edge_index, edge);
match expected_outgoing[counter] {
(ref e, ref n) => {
assert_eq!(e, &edge.data);
assert_eq!(start_index, edge.source);
assert_eq!(n, graph.node_data(edge.target));
}
}
counter += 1;
true
};
assert_eq!(counter, expected_outgoing.len());
}
#[test]
fn each_adjacent_from_a() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(0), "A",
[],
[("AB", "B")]);
}
#[test]
fn each_adjacent_from_b() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(1), "B",
[("FB", "F"), ("AB", "A"),],
[("BD", "D"), ("BC", "C"),]);
}
#[test]
fn each_adjacent_from_c() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(2), "C",
[("EC", "E"), ("BC", "B")],
[]);
}
#[test]
fn each_adjacent_from_d() {
let graph = create_graph();
test_adjacent_edges(&graph, NodeIndex(3), "D",
[("BD", "B")],
[("DE", "E")]);
}
}
|
{
&mut self.edges[*idx].data
}
|
identifier_body
|
type_allocation_places.rs
|
#![allow(dead_code)]
use crate::config::MovableTypesHookOutput;
use crate::cpp_data::{CppItem, CppPath};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::processor::ProcessorData;
use log::{info, trace};
use ritual_common::errors::Result;
use std::collections::HashMap;
#[derive(Default, Debug)]
struct TypeStats {
virtual_functions: Vec<String>,
pointer_encounters: Vec<String>,
non_pointer_encounters: Vec<String>,
}
fn log_results(data_map: &HashMap<CppPath, TypeStats>) {
for (name, stats) in data_map {
trace!("type = {}; stats = {:?}", name.to_cpp_pseudo_code(), stats);
}
for (path, stats) in data_map {
let suggestion = if stats.virtual_functions.is_empty() {
if stats.pointer_encounters.is_empty() {
if stats.non_pointer_encounters.len() == MAX_ITEMS {
"movable (no pointers, no virtual functions)"
} else {
"probably movable (no pointers, no virtual functions, but too few items)"
}
} else if stats.pointer_encounters.len() < 5
&& stats.non_pointer_encounters.len() == MAX_ITEMS
{
"probably movable (few pointers)"
} else if stats.pointer_encounters.len() == MAX_ITEMS {
"immovable (many pointers)"
} else {
"unknown (too few items)"
}
} else {
"immovable (has virtual functions)"
};
info!("{:?} is {}", path.to_templateless_string(), suggestion);
info!("path = {}", path.to_cpp_pseudo_code());
info!("* virtual_functions ({}):", stats.virtual_functions.len());
for item in &stats.virtual_functions {
info!("* * {}", item);
}
info!("* pointer_encounters ({}):", stats.pointer_encounters.len());
for item in &stats.pointer_encounters {
info!("* * {}", item);
}
info!(
"* non_pointer_encounters ({}):",
stats.non_pointer_encounters.len()
);
for item in &stats.non_pointer_encounters {
info!("* * {}", item);
}
}
}
fn check_type(
cpp_type: &CppType,
is_behind_pointer: bool,
data_map: &mut HashMap<CppPath, TypeStats>,
item_text: &str,
) {
match cpp_type {
CppType::Class(path) =>
|
CppType::PointerLike { kind, target, .. } => {
check_type(
target,
*kind == CppPointerLikeTypeKind::Pointer,
data_map,
item_text,
);
}
_ => {}
}
}
const MAX_ITEMS: usize = 10;
/// Detects the preferred type allocation place for each type based on
/// API of all known methods. Doesn't actually change the data,
/// only suggests stack allocated types for manual configuration.
pub fn suggest_allocation_places(data: &mut ProcessorData<'_>) -> Result<()> {
let mut data_map = HashMap::new();
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Type(type1) = &item.item {
if !type1.kind.is_class() {
continue;
}
if let Some(hook) = data.config.movable_types_hook() {
if hook(&type1.path)? != MovableTypesHookOutput::Unknown {
continue;
}
}
let good_path = type1.path.deinstantiate();
data_map.insert(good_path, Default::default());
}
}
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Function(function) = &item.item {
if function.is_private() {
continue;
}
let item_text = function.short_text();
for t in &function.arguments {
check_type(&t.argument_type, false, &mut data_map, &item_text);
}
check_type(&function.return_type, false, &mut data_map, &item_text);
if function.is_virtual() {
let type1 = function.class_path()?;
let good_path = type1.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if stats.virtual_functions.len() < MAX_ITEMS {
stats.virtual_functions.push(item_text);
}
}
}
}
}
log_results(&data_map);
Ok(())
}
|
{
let good_path = path.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if is_behind_pointer {
if stats.pointer_encounters.len() < MAX_ITEMS {
stats.pointer_encounters.push(item_text.to_string());
}
} else if stats.non_pointer_encounters.len() < MAX_ITEMS {
stats.non_pointer_encounters.push(item_text.to_string());
}
}
if let Some(args) = &path.last().template_arguments {
for arg in args {
check_type(arg, false, data_map, item_text);
}
}
}
|
conditional_block
|
type_allocation_places.rs
|
#![allow(dead_code)]
use crate::config::MovableTypesHookOutput;
use crate::cpp_data::{CppItem, CppPath};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::processor::ProcessorData;
use log::{info, trace};
use ritual_common::errors::Result;
use std::collections::HashMap;
#[derive(Default, Debug)]
struct TypeStats {
virtual_functions: Vec<String>,
pointer_encounters: Vec<String>,
non_pointer_encounters: Vec<String>,
}
fn
|
(data_map: &HashMap<CppPath, TypeStats>) {
for (name, stats) in data_map {
trace!("type = {}; stats = {:?}", name.to_cpp_pseudo_code(), stats);
}
for (path, stats) in data_map {
let suggestion = if stats.virtual_functions.is_empty() {
if stats.pointer_encounters.is_empty() {
if stats.non_pointer_encounters.len() == MAX_ITEMS {
"movable (no pointers, no virtual functions)"
} else {
"probably movable (no pointers, no virtual functions, but too few items)"
}
} else if stats.pointer_encounters.len() < 5
&& stats.non_pointer_encounters.len() == MAX_ITEMS
{
"probably movable (few pointers)"
} else if stats.pointer_encounters.len() == MAX_ITEMS {
"immovable (many pointers)"
} else {
"unknown (too few items)"
}
} else {
"immovable (has virtual functions)"
};
info!("{:?} is {}", path.to_templateless_string(), suggestion);
info!("path = {}", path.to_cpp_pseudo_code());
info!("* virtual_functions ({}):", stats.virtual_functions.len());
for item in &stats.virtual_functions {
info!("* * {}", item);
}
info!("* pointer_encounters ({}):", stats.pointer_encounters.len());
for item in &stats.pointer_encounters {
info!("* * {}", item);
}
info!(
"* non_pointer_encounters ({}):",
stats.non_pointer_encounters.len()
);
for item in &stats.non_pointer_encounters {
info!("* * {}", item);
}
}
}
fn check_type(
cpp_type: &CppType,
is_behind_pointer: bool,
data_map: &mut HashMap<CppPath, TypeStats>,
item_text: &str,
) {
match cpp_type {
CppType::Class(path) => {
let good_path = path.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if is_behind_pointer {
if stats.pointer_encounters.len() < MAX_ITEMS {
stats.pointer_encounters.push(item_text.to_string());
}
} else if stats.non_pointer_encounters.len() < MAX_ITEMS {
stats.non_pointer_encounters.push(item_text.to_string());
}
}
if let Some(args) = &path.last().template_arguments {
for arg in args {
check_type(arg, false, data_map, item_text);
}
}
}
CppType::PointerLike { kind, target, .. } => {
check_type(
target,
*kind == CppPointerLikeTypeKind::Pointer,
data_map,
item_text,
);
}
_ => {}
}
}
const MAX_ITEMS: usize = 10;
/// Detects the preferred type allocation place for each type based on
/// API of all known methods. Doesn't actually change the data,
/// only suggests stack allocated types for manual configuration.
pub fn suggest_allocation_places(data: &mut ProcessorData<'_>) -> Result<()> {
let mut data_map = HashMap::new();
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Type(type1) = &item.item {
if !type1.kind.is_class() {
continue;
}
if let Some(hook) = data.config.movable_types_hook() {
if hook(&type1.path)? != MovableTypesHookOutput::Unknown {
continue;
}
}
let good_path = type1.path.deinstantiate();
data_map.insert(good_path, Default::default());
}
}
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Function(function) = &item.item {
if function.is_private() {
continue;
}
let item_text = function.short_text();
for t in &function.arguments {
check_type(&t.argument_type, false, &mut data_map, &item_text);
}
check_type(&function.return_type, false, &mut data_map, &item_text);
if function.is_virtual() {
let type1 = function.class_path()?;
let good_path = type1.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if stats.virtual_functions.len() < MAX_ITEMS {
stats.virtual_functions.push(item_text);
}
}
}
}
}
log_results(&data_map);
Ok(())
}
|
log_results
|
identifier_name
|
type_allocation_places.rs
|
#![allow(dead_code)]
use crate::config::MovableTypesHookOutput;
use crate::cpp_data::{CppItem, CppPath};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::processor::ProcessorData;
use log::{info, trace};
use ritual_common::errors::Result;
use std::collections::HashMap;
#[derive(Default, Debug)]
struct TypeStats {
virtual_functions: Vec<String>,
pointer_encounters: Vec<String>,
non_pointer_encounters: Vec<String>,
}
fn log_results(data_map: &HashMap<CppPath, TypeStats>) {
for (name, stats) in data_map {
trace!("type = {}; stats = {:?}", name.to_cpp_pseudo_code(), stats);
}
for (path, stats) in data_map {
let suggestion = if stats.virtual_functions.is_empty() {
if stats.pointer_encounters.is_empty() {
if stats.non_pointer_encounters.len() == MAX_ITEMS {
"movable (no pointers, no virtual functions)"
} else {
"probably movable (no pointers, no virtual functions, but too few items)"
}
} else if stats.pointer_encounters.len() < 5
&& stats.non_pointer_encounters.len() == MAX_ITEMS
{
"probably movable (few pointers)"
} else if stats.pointer_encounters.len() == MAX_ITEMS {
"immovable (many pointers)"
} else {
"unknown (too few items)"
}
} else {
"immovable (has virtual functions)"
};
info!("{:?} is {}", path.to_templateless_string(), suggestion);
info!("path = {}", path.to_cpp_pseudo_code());
info!("* virtual_functions ({}):", stats.virtual_functions.len());
|
}
info!("* pointer_encounters ({}):", stats.pointer_encounters.len());
for item in &stats.pointer_encounters {
info!("* * {}", item);
}
info!(
"* non_pointer_encounters ({}):",
stats.non_pointer_encounters.len()
);
for item in &stats.non_pointer_encounters {
info!("* * {}", item);
}
}
}
fn check_type(
cpp_type: &CppType,
is_behind_pointer: bool,
data_map: &mut HashMap<CppPath, TypeStats>,
item_text: &str,
) {
match cpp_type {
CppType::Class(path) => {
let good_path = path.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if is_behind_pointer {
if stats.pointer_encounters.len() < MAX_ITEMS {
stats.pointer_encounters.push(item_text.to_string());
}
} else if stats.non_pointer_encounters.len() < MAX_ITEMS {
stats.non_pointer_encounters.push(item_text.to_string());
}
}
if let Some(args) = &path.last().template_arguments {
for arg in args {
check_type(arg, false, data_map, item_text);
}
}
}
CppType::PointerLike { kind, target, .. } => {
check_type(
target,
*kind == CppPointerLikeTypeKind::Pointer,
data_map,
item_text,
);
}
_ => {}
}
}
const MAX_ITEMS: usize = 10;
/// Detects the preferred type allocation place for each type based on
/// API of all known methods. Doesn't actually change the data,
/// only suggests stack allocated types for manual configuration.
pub fn suggest_allocation_places(data: &mut ProcessorData<'_>) -> Result<()> {
let mut data_map = HashMap::new();
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Type(type1) = &item.item {
if !type1.kind.is_class() {
continue;
}
if let Some(hook) = data.config.movable_types_hook() {
if hook(&type1.path)? != MovableTypesHookOutput::Unknown {
continue;
}
}
let good_path = type1.path.deinstantiate();
data_map.insert(good_path, Default::default());
}
}
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Function(function) = &item.item {
if function.is_private() {
continue;
}
let item_text = function.short_text();
for t in &function.arguments {
check_type(&t.argument_type, false, &mut data_map, &item_text);
}
check_type(&function.return_type, false, &mut data_map, &item_text);
if function.is_virtual() {
let type1 = function.class_path()?;
let good_path = type1.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if stats.virtual_functions.len() < MAX_ITEMS {
stats.virtual_functions.push(item_text);
}
}
}
}
}
log_results(&data_map);
Ok(())
}
|
for item in &stats.virtual_functions {
info!("* * {}", item);
|
random_line_split
|
type_allocation_places.rs
|
#![allow(dead_code)]
use crate::config::MovableTypesHookOutput;
use crate::cpp_data::{CppItem, CppPath};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::processor::ProcessorData;
use log::{info, trace};
use ritual_common::errors::Result;
use std::collections::HashMap;
#[derive(Default, Debug)]
struct TypeStats {
virtual_functions: Vec<String>,
pointer_encounters: Vec<String>,
non_pointer_encounters: Vec<String>,
}
fn log_results(data_map: &HashMap<CppPath, TypeStats>) {
for (name, stats) in data_map {
trace!("type = {}; stats = {:?}", name.to_cpp_pseudo_code(), stats);
}
for (path, stats) in data_map {
let suggestion = if stats.virtual_functions.is_empty() {
if stats.pointer_encounters.is_empty() {
if stats.non_pointer_encounters.len() == MAX_ITEMS {
"movable (no pointers, no virtual functions)"
} else {
"probably movable (no pointers, no virtual functions, but too few items)"
}
} else if stats.pointer_encounters.len() < 5
&& stats.non_pointer_encounters.len() == MAX_ITEMS
{
"probably movable (few pointers)"
} else if stats.pointer_encounters.len() == MAX_ITEMS {
"immovable (many pointers)"
} else {
"unknown (too few items)"
}
} else {
"immovable (has virtual functions)"
};
info!("{:?} is {}", path.to_templateless_string(), suggestion);
info!("path = {}", path.to_cpp_pseudo_code());
info!("* virtual_functions ({}):", stats.virtual_functions.len());
for item in &stats.virtual_functions {
info!("* * {}", item);
}
info!("* pointer_encounters ({}):", stats.pointer_encounters.len());
for item in &stats.pointer_encounters {
info!("* * {}", item);
}
info!(
"* non_pointer_encounters ({}):",
stats.non_pointer_encounters.len()
);
for item in &stats.non_pointer_encounters {
info!("* * {}", item);
}
}
}
fn check_type(
cpp_type: &CppType,
is_behind_pointer: bool,
data_map: &mut HashMap<CppPath, TypeStats>,
item_text: &str,
)
|
CppType::PointerLike { kind, target, .. } => {
check_type(
target,
*kind == CppPointerLikeTypeKind::Pointer,
data_map,
item_text,
);
}
_ => {}
}
}
const MAX_ITEMS: usize = 10;
/// Detects the preferred type allocation place for each type based on
/// API of all known methods. Doesn't actually change the data,
/// only suggests stack allocated types for manual configuration.
pub fn suggest_allocation_places(data: &mut ProcessorData<'_>) -> Result<()> {
let mut data_map = HashMap::new();
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Type(type1) = &item.item {
if !type1.kind.is_class() {
continue;
}
if let Some(hook) = data.config.movable_types_hook() {
if hook(&type1.path)? != MovableTypesHookOutput::Unknown {
continue;
}
}
let good_path = type1.path.deinstantiate();
data_map.insert(good_path, Default::default());
}
}
for item in data.db.cpp_items() {
if item.source_id.is_some() {
continue;
}
if let CppItem::Function(function) = &item.item {
if function.is_private() {
continue;
}
let item_text = function.short_text();
for t in &function.arguments {
check_type(&t.argument_type, false, &mut data_map, &item_text);
}
check_type(&function.return_type, false, &mut data_map, &item_text);
if function.is_virtual() {
let type1 = function.class_path()?;
let good_path = type1.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if stats.virtual_functions.len() < MAX_ITEMS {
stats.virtual_functions.push(item_text);
}
}
}
}
}
log_results(&data_map);
Ok(())
}
|
{
match cpp_type {
CppType::Class(path) => {
let good_path = path.deinstantiate();
if let Some(stats) = data_map.get_mut(&good_path) {
if is_behind_pointer {
if stats.pointer_encounters.len() < MAX_ITEMS {
stats.pointer_encounters.push(item_text.to_string());
}
} else if stats.non_pointer_encounters.len() < MAX_ITEMS {
stats.non_pointer_encounters.push(item_text.to_string());
}
}
if let Some(args) = &path.last().template_arguments {
for arg in args {
check_type(arg, false, data_map, item_text);
}
}
}
|
identifier_body
|
join_room_by_id_or_alias.rs
|
//! [POST /_matrix/client/r0/join/{roomIdOrAlias}](https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-join-roomidoralias)
use ruma_api::ruma_api;
use ruma_identifiers::{RoomId, RoomIdOrAliasId};
use super::ThirdPartySigned;
ruma_api! {
metadata {
description: "Join a room using its ID or one of its aliases.",
method: POST,
name: "join_room_by_id_or_alias",
path: "/_matrix/client/r0/join/:room_id_or_alias",
|
/// The room where the user should be invited.
#[ruma_api(path)]
pub room_id_or_alias: RoomIdOrAliasId,
/// The servers to attempt to join the room through. One of the servers
/// must be participating in the room.
#[ruma_api(query)]
#[serde(default)]
pub server_name: Vec<String>,
/// The signature of a `m.third_party_invite` token to prove that this user owns a third
/// party identity which has been invited to the room.
#[serde(skip_serializing_if = "Option::is_none")]
pub third_party_signed: Option<ThirdPartySigned>,
}
response {
/// The room that the user joined.
pub room_id: RoomId,
}
error: crate::Error
}
|
rate_limited: true,
requires_authentication: true,
}
request {
|
random_line_split
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Lints in the Rust compiler.
//!
//! This currently only contains the definitions and implementations
//! of most of the lints that `rustc` supports directly, it does not
//! contain the infrastructure for defining/registering lints. That is
//! available in `rustc::lint` and `rustc::plugin` respectively.
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustc_lint"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(str_char)]
#![cfg_attr(test, feature(test))]
extern crate syntax;
#[macro_use]
extern crate rustc;
#[macro_use]
extern crate log;
pub use rustc::lint as lint;
pub use rustc::metadata as metadata;
|
use session::Session;
use lint::LintId;
mod builtin;
/// Tell the `LintStore` about all the built-in lints (the ones
/// defined in this crate and the ones defined in
/// `rustc::lint::builtin`).
pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
macro_rules! add_builtin {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name);
)*}
)
}
macro_rules! add_builtin_with_new {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name::new());
)*}
)
}
macro_rules! add_lint_group {
($sess:ident, $name:expr, $($lint:ident),*) => (
store.register_group($sess, false, $name, vec![$(LintId::of(builtin::$lint)),*]);
)
}
add_builtin!(sess,
HardwiredLints,
WhileTrue,
ImproperCTypes,
BoxPointers,
UnusedAttributes,
PathStatements,
UnusedResults,
NonCamelCaseTypes,
NonSnakeCase,
NonUpperCaseGlobals,
UnusedParens,
UnusedImportBraces,
NonShorthandFieldPatterns,
UnusedUnsafe,
UnsafeCode,
UnusedMut,
UnusedAllocation,
MissingCopyImplementations,
UnstableFeatures,
Stability,
UnconditionalRecursion,
InvalidNoMangleItems,
PluginAsLibrary,
DropWithReprExtern,
MutableTransmutes,
);
add_builtin_with_new!(sess,
TypeLimits,
RawPointerDerive,
MissingDoc,
MissingDebugImplementations,
);
add_lint_group!(sess, "bad_style",
NON_CAMEL_CASE_TYPES, NON_SNAKE_CASE, NON_UPPER_CASE_GLOBALS);
add_lint_group!(sess, "unused",
UNUSED_IMPORTS, UNUSED_VARIABLES, UNUSED_ASSIGNMENTS, DEAD_CODE,
UNUSED_MUT, UNREACHABLE_CODE, UNUSED_MUST_USE,
UNUSED_UNSAFE, PATH_STATEMENTS);
// We have one lint pass defined specially
store.register_pass(sess, false, box lint::GatherNodeLevels);
// Insert temporary renamings for a one-time deprecation
store.register_renamed("raw_pointer_deriving", "raw_pointer_derive");
store.register_renamed("unknown_features", "unused_features");
}
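// Illustrative sketch (not part of the original file): a compiler driver would
// typically call this right after creating the lint store, e.g.
//
//     let mut store = lint::LintStore::new();
//     register_builtins(&mut store, Some(&sess));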
|
pub use rustc::middle as middle;
pub use rustc::session as session;
pub use rustc::util as util;
|
random_line_split
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Lints in the Rust compiler.
//!
//! This currently only contains the definitions and implementations
//! of most of the lints that `rustc` supports directly, it does not
//! contain the infrastructure for defining/registering lints. That is
//! available in `rustc::lint` and `rustc::plugin` respectively.
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustc_lint"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(str_char)]
#![cfg_attr(test, feature(test))]
extern crate syntax;
#[macro_use]
extern crate rustc;
#[macro_use]
extern crate log;
pub use rustc::lint as lint;
pub use rustc::metadata as metadata;
pub use rustc::middle as middle;
pub use rustc::session as session;
pub use rustc::util as util;
use session::Session;
use lint::LintId;
mod builtin;
/// Tell the `LintStore` about all the built-in lints (the ones
/// defined in this crate and the ones defined in
/// `rustc::lint::builtin`).
pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>)
|
)
}
add_builtin!(sess,
HardwiredLints,
WhileTrue,
ImproperCTypes,
BoxPointers,
UnusedAttributes,
PathStatements,
UnusedResults,
NonCamelCaseTypes,
NonSnakeCase,
NonUpperCaseGlobals,
UnusedParens,
UnusedImportBraces,
NonShorthandFieldPatterns,
UnusedUnsafe,
UnsafeCode,
UnusedMut,
UnusedAllocation,
MissingCopyImplementations,
UnstableFeatures,
Stability,
UnconditionalRecursion,
InvalidNoMangleItems,
PluginAsLibrary,
DropWithReprExtern,
MutableTransmutes,
);
add_builtin_with_new!(sess,
TypeLimits,
RawPointerDerive,
MissingDoc,
MissingDebugImplementations,
);
add_lint_group!(sess, "bad_style",
NON_CAMEL_CASE_TYPES, NON_SNAKE_CASE, NON_UPPER_CASE_GLOBALS);
add_lint_group!(sess, "unused",
UNUSED_IMPORTS, UNUSED_VARIABLES, UNUSED_ASSIGNMENTS, DEAD_CODE,
UNUSED_MUT, UNREACHABLE_CODE, UNUSED_MUST_USE,
UNUSED_UNSAFE, PATH_STATEMENTS);
// We have one lint pass defined specially
store.register_pass(sess, false, box lint::GatherNodeLevels);
// Insert temporary renamings for a one-time deprecation
store.register_renamed("raw_pointer_deriving", "raw_pointer_derive");
store.register_renamed("unknown_features", "unused_features");
}
|
{
macro_rules! add_builtin {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name);
)*}
)
}
macro_rules! add_builtin_with_new {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name::new());
)*}
)
}
macro_rules! add_lint_group {
($sess:ident, $name:expr, $($lint:ident),*) => (
store.register_group($sess, false, $name, vec![$(LintId::of(builtin::$lint)),*]);
|
identifier_body
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Lints in the Rust compiler.
//!
//! This currently only contains the definitions and implementations
//! of most of the lints that `rustc` supports directly, it does not
//! contain the infrastructure for defining/registering lints. That is
//! available in `rustc::lint` and `rustc::plugin` respectively.
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustc_lint"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(str_char)]
#![cfg_attr(test, feature(test))]
extern crate syntax;
#[macro_use]
extern crate rustc;
#[macro_use]
extern crate log;
pub use rustc::lint as lint;
pub use rustc::metadata as metadata;
pub use rustc::middle as middle;
pub use rustc::session as session;
pub use rustc::util as util;
use session::Session;
use lint::LintId;
mod builtin;
/// Tell the `LintStore` about all the built-in lints (the ones
/// defined in this crate and the ones defined in
/// `rustc::lint::builtin`).
pub fn
|
(store: &mut lint::LintStore, sess: Option<&Session>) {
macro_rules! add_builtin {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name);
)*}
)
}
macro_rules! add_builtin_with_new {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pass($sess, false, box builtin::$name::new());
)*}
)
}
macro_rules! add_lint_group {
($sess:ident, $name:expr, $($lint:ident),*) => (
store.register_group($sess, false, $name, vec![$(LintId::of(builtin::$lint)),*]);
)
}
add_builtin!(sess,
HardwiredLints,
WhileTrue,
ImproperCTypes,
BoxPointers,
UnusedAttributes,
PathStatements,
UnusedResults,
NonCamelCaseTypes,
NonSnakeCase,
NonUpperCaseGlobals,
UnusedParens,
UnusedImportBraces,
NonShorthandFieldPatterns,
UnusedUnsafe,
UnsafeCode,
UnusedMut,
UnusedAllocation,
MissingCopyImplementations,
UnstableFeatures,
Stability,
UnconditionalRecursion,
InvalidNoMangleItems,
PluginAsLibrary,
DropWithReprExtern,
MutableTransmutes,
);
add_builtin_with_new!(sess,
TypeLimits,
RawPointerDerive,
MissingDoc,
MissingDebugImplementations,
);
add_lint_group!(sess, "bad_style",
NON_CAMEL_CASE_TYPES, NON_SNAKE_CASE, NON_UPPER_CASE_GLOBALS);
add_lint_group!(sess, "unused",
UNUSED_IMPORTS, UNUSED_VARIABLES, UNUSED_ASSIGNMENTS, DEAD_CODE,
UNUSED_MUT, UNREACHABLE_CODE, UNUSED_MUST_USE,
UNUSED_UNSAFE, PATH_STATEMENTS);
// We have one lint pass defined specially
store.register_pass(sess, false, box lint::GatherNodeLevels);
// Insert temporary renamings for a one-time deprecation
store.register_renamed("raw_pointer_deriving", "raw_pointer_derive");
store.register_renamed("unknown_features", "unused_features");
}
|
register_builtins
|
identifier_name
|
the_test.rs
|
use std::env;
use std::process;
use std::sync::Arc;
fn main() {
log_ndc_env_logger::init();
let mut server = httpbis::ServerBuilder::new_plain();
server
.service
.set_service("/", Arc::new(httpbis_h2spec_test::Ok200));
server.set_port(8888);
let _server = server.build().expect("server.build()");
let mut h2spec = process::Command::new("h2spec")
.args(&["-p", "8888", "-v"])
.args(env::args().skip(1))
|
.stderr(process::Stdio::inherit())
.spawn()
.expect("spawn h2spec");
let exit_status = h2spec.wait().expect("h2spec wait");
assert!(exit_status.success(), "{}", exit_status);
}
|
.stdin(process::Stdio::null())
.stdout(process::Stdio::inherit())
|
random_line_split
|
the_test.rs
|
use std::env;
use std::process;
use std::sync::Arc;
fn
|
() {
log_ndc_env_logger::init();
let mut server = httpbis::ServerBuilder::new_plain();
server
.service
.set_service("/", Arc::new(httpbis_h2spec_test::Ok200));
server.set_port(8888);
let _server = server.build().expect("server.build()");
let mut h2spec = process::Command::new("h2spec")
.args(&["-p", "8888", "-v"])
.args(env::args().skip(1))
.stdin(process::Stdio::null())
.stdout(process::Stdio::inherit())
.stderr(process::Stdio::inherit())
.spawn()
.expect("spawn h2spec");
let exit_status = h2spec.wait().expect("h2spec wait");
assert!(exit_status.success(), "{}", exit_status);
}
|
main
|
identifier_name
|
the_test.rs
|
use std::env;
use std::process;
use std::sync::Arc;
fn main()
|
assert!(exit_status.success(), "{}", exit_status);
}
|
{
log_ndc_env_logger::init();
let mut server = httpbis::ServerBuilder::new_plain();
server
.service
.set_service("/", Arc::new(httpbis_h2spec_test::Ok200));
server.set_port(8888);
let _server = server.build().expect("server.build()");
let mut h2spec = process::Command::new("h2spec")
.args(&["-p", "8888", "-v"])
.args(env::args().skip(1))
.stdin(process::Stdio::null())
.stdout(process::Stdio::inherit())
.stderr(process::Stdio::inherit())
.spawn()
.expect("spawn h2spec");
let exit_status = h2spec.wait().expect("h2spec wait");
|
identifier_body
|
mod.rs
|
mod compile_mx;
pub mod cpp;
pub mod paths;
mod template;
use crate::error::Result;
use crate::generate::cpp::modeler::MxModeler;
use crate::generate::paths::Paths;
use crate::model::create::Create;
use crate::model::creator::Creator;
use crate::model::post_process::PostProcess;
use crate::model::transform::Transform;
use crate::xsd::Xsd;
use cpp::constants::enum_member_substitutions;
use cpp::constants::{pseudo_enums, reserved_words, suffixed_enum_names};
use std::fs::read_to_string;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct GenArgs {
pub paths: Paths,
}
impl Default for GenArgs {
fn default() -> Self {
Self {
paths: Paths::default(),
}
}
}
/// Generate `mx::core` in C++
pub fn
|
(args: GenArgs) -> Result<()> {
let xsd = read_to_string(&args.paths.xsd_3_0).unwrap();
let doc = exile::parse(xsd.as_str()).unwrap();
let new_xsd = Xsd::load(&args.paths.xsd_3_0)?;
let transforms: Vec<Box<dyn Transform>> = vec![Box::new(MxModeler::new())];
let creates: Vec<Box<dyn Create>> = vec![Box::new(MxModeler::new())];
let post_processors: Vec<Box<dyn PostProcess>> = vec![Box::new(MxModeler::new())];
let creator = Creator::new_with_default(Some(transforms), Some(creates), Some(post_processors));
let models = creator.create(&new_xsd)?;
let cpp_writer = cpp::writer::Writer::new(models);
cpp_writer.write_code()?;
Ok(())
}
|
run
|
identifier_name
|
mod.rs
|
mod compile_mx;
pub mod cpp;
pub mod paths;
mod template;
use crate::error::Result;
use crate::generate::cpp::modeler::MxModeler;
use crate::generate::paths::Paths;
use crate::model::create::Create;
use crate::model::creator::Creator;
use crate::model::post_process::PostProcess;
use crate::model::transform::Transform;
use crate::xsd::Xsd;
use cpp::constants::enum_member_substitutions;
use cpp::constants::{pseudo_enums, reserved_words, suffixed_enum_names};
use std::fs::read_to_string;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct GenArgs {
pub paths: Paths,
}
impl Default for GenArgs {
fn default() -> Self
|
}
/// Generate `mx::core` in C++
pub fn run(args: GenArgs) -> Result<()> {
let xsd = read_to_string(&args.paths.xsd_3_0).unwrap();
let doc = exile::parse(xsd.as_str()).unwrap();
let new_xsd = Xsd::load(&args.paths.xsd_3_0)?;
let transforms: Vec<Box<dyn Transform>> = vec![Box::new(MxModeler::new())];
let creates: Vec<Box<dyn Create>> = vec![Box::new(MxModeler::new())];
let post_processors: Vec<Box<dyn PostProcess>> = vec![Box::new(MxModeler::new())];
let creator = Creator::new_with_default(Some(transforms), Some(creates), Some(post_processors));
let models = creator.create(&new_xsd)?;
let cpp_writer = cpp::writer::Writer::new(models);
cpp_writer.write_code()?;
Ok(())
}
|
{
Self {
paths: Paths::default(),
}
}
|
identifier_body
|
mod.rs
|
mod compile_mx;
pub mod cpp;
pub mod paths;
mod template;
use crate::error::Result;
use crate::generate::cpp::modeler::MxModeler;
use crate::generate::paths::Paths;
use crate::model::create::Create;
use crate::model::creator::Creator;
use crate::model::post_process::PostProcess;
use crate::model::transform::Transform;
use crate::xsd::Xsd;
use cpp::constants::enum_member_substitutions;
use cpp::constants::{pseudo_enums, reserved_words, suffixed_enum_names};
use std::fs::read_to_string;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct GenArgs {
pub paths: Paths,
}
impl Default for GenArgs {
fn default() -> Self {
Self {
paths: Paths::default(),
}
}
}
/// Generate `mx::core` in C++
pub fn run(args: GenArgs) -> Result<()> {
let xsd = read_to_string(&args.paths.xsd_3_0).unwrap();
let doc = exile::parse(xsd.as_str()).unwrap();
let new_xsd = Xsd::load(&args.paths.xsd_3_0)?;
let transforms: Vec<Box<dyn Transform>> = vec![Box::new(MxModeler::new())];
|
let creates: Vec<Box<dyn Create>> = vec![Box::new(MxModeler::new())];
let post_processors: Vec<Box<dyn PostProcess>> = vec![Box::new(MxModeler::new())];
let creator = Creator::new_with_default(Some(transforms), Some(creates), Some(post_processors));
let models = creator.create(&new_xsd)?;
let cpp_writer = cpp::writer::Writer::new(models);
cpp_writer.write_code()?;
Ok(())
}
|
random_line_split
|
|
cdgdec.rs
|
// Copyright (C) 2019 Guillaume Desmottes <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use gst::prelude::*;
use std::path::PathBuf;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstcdg::plugin_register_static().expect("cdgdec tests");
});
}
#[test]
fn
|
() {
init();
let pipeline = gst::Pipeline::new(Some("cdgdec-test"));
let input_path = {
let mut r = PathBuf::new();
r.push(env!("CARGO_MANIFEST_DIR"));
r.push("tests");
r.push("BrotherJohn");
r.set_extension("cdg");
r
};
// Ensure we are in push mode so 'blocksize' prop is used
let filesrc = gst::ElementFactory::make("pushfilesrc", None).unwrap();
filesrc
.set_property("location", &input_path.to_str().unwrap())
.expect("failed to set 'location' property");
{
let child_proxy = filesrc.dynamic_cast_ref::<gst::ChildProxy>().unwrap();
child_proxy
.set_child_property("real-filesrc::num-buffers", &1)
.expect("failed to set 'num-buffers' property");
let blocksize: u32 = 24; // One CDG instruction
child_proxy
.set_child_property("real-filesrc::blocksize", &blocksize)
.expect("failed to set 'blocksize' property");
}
let parse = gst::ElementFactory::make("cdgparse", None).unwrap();
let dec = gst::ElementFactory::make("cdgdec", None).unwrap();
let sink = gst::ElementFactory::make("appsink", None).unwrap();
pipeline
.add_many(&[&filesrc, &parse, &dec, &sink])
.expect("failed to add elements to the pipeline");
gst::Element::link_many(&[&filesrc, &parse, &dec, &sink]).expect("failed to link the elements");
let sink = sink.downcast::<gst_app::AppSink>().unwrap();
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "new-sample" signal.
.new_sample(move |appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.get_buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
// First frame fully blue
map.as_slice()
.chunks_exact(4)
.for_each(|p| assert_eq!(p, [0, 0, 136, 255]));
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let bus = pipeline.get_bus().unwrap();
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Error(err) => {
eprintln!(
"Error received from element {:?}: {}",
err.get_src().map(|s| s.get_path_string()),
err.get_error()
);
eprintln!("Debugging information: {:?}", err.get_debug());
unreachable!();
}
MessageView::Eos(..) => break,
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
|
test_cdgdec
|
identifier_name
|
cdgdec.rs
|
// Copyright (C) 2019 Guillaume Desmottes <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use gst::prelude::*;
use std::path::PathBuf;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstcdg::plugin_register_static().expect("cdgdec tests");
});
}
#[test]
fn test_cdgdec() {
init();
let pipeline = gst::Pipeline::new(Some("cdgdec-test"));
let input_path = {
let mut r = PathBuf::new();
r.push(env!("CARGO_MANIFEST_DIR"));
r.push("tests");
r.push("BrotherJohn");
r.set_extension("cdg");
r
};
// Ensure we are in push mode so 'blocksize' prop is used
let filesrc = gst::ElementFactory::make("pushfilesrc", None).unwrap();
filesrc
.set_property("location", &input_path.to_str().unwrap())
.expect("failed to set 'location' property");
{
let child_proxy = filesrc.dynamic_cast_ref::<gst::ChildProxy>().unwrap();
child_proxy
.set_child_property("real-filesrc::num-buffers", &1)
|
child_proxy
.set_child_property("real-filesrc::blocksize", &blocksize)
.expect("failed to set 'blocksize' property");
}
let parse = gst::ElementFactory::make("cdgparse", None).unwrap();
let dec = gst::ElementFactory::make("cdgdec", None).unwrap();
let sink = gst::ElementFactory::make("appsink", None).unwrap();
pipeline
.add_many(&[&filesrc, &parse, &dec, &sink])
.expect("failed to add elements to the pipeline");
gst::Element::link_many(&[&filesrc, &parse, &dec, &sink]).expect("failed to link the elements");
let sink = sink.downcast::<gst_app::AppSink>().unwrap();
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "new-sample" signal.
.new_sample(move |appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.get_buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
// First frame fully blue
map.as_slice()
.chunks_exact(4)
.for_each(|p| assert_eq!(p, [0, 0, 136, 255]));
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let bus = pipeline.get_bus().unwrap();
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Error(err) => {
eprintln!(
"Error received from element {:?}: {}",
err.get_src().map(|s| s.get_path_string()),
err.get_error()
);
eprintln!("Debugging information: {:?}", err.get_debug());
unreachable!();
}
MessageView::Eos(..) => break,
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
|
.expect("failed to set 'num-buffers' property");
let blocksize: u32 = 24; // One CDG instruction
|
random_line_split
|
cdgdec.rs
|
// Copyright (C) 2019 Guillaume Desmottes <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use gst::prelude::*;
use std::path::PathBuf;
fn init()
|
#[test]
fn test_cdgdec() {
init();
let pipeline = gst::Pipeline::new(Some("cdgdec-test"));
let input_path = {
let mut r = PathBuf::new();
r.push(env!("CARGO_MANIFEST_DIR"));
r.push("tests");
r.push("BrotherJohn");
r.set_extension("cdg");
r
};
// Ensure we are in push mode so 'blocksize' prop is used
let filesrc = gst::ElementFactory::make("pushfilesrc", None).unwrap();
filesrc
.set_property("location", &input_path.to_str().unwrap())
.expect("failed to set 'location' property");
{
let child_proxy = filesrc.dynamic_cast_ref::<gst::ChildProxy>().unwrap();
child_proxy
.set_child_property("real-filesrc::num-buffers", &1)
.expect("failed to set 'num-buffers' property");
let blocksize: u32 = 24; // One CDG instruction
child_proxy
.set_child_property("real-filesrc::blocksize", &blocksize)
.expect("failed to set 'blocksize' property");
}
let parse = gst::ElementFactory::make("cdgparse", None).unwrap();
let dec = gst::ElementFactory::make("cdgdec", None).unwrap();
let sink = gst::ElementFactory::make("appsink", None).unwrap();
pipeline
.add_many(&[&filesrc, &parse, &dec, &sink])
.expect("failed to add elements to the pipeline");
gst::Element::link_many(&[&filesrc, &parse, &dec, &sink]).expect("failed to link the elements");
let sink = sink.downcast::<gst_app::AppSink>().unwrap();
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "new-sample" signal.
.new_sample(move |appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.get_buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
// First frame fully blue
map.as_slice()
.chunks_exact(4)
.for_each(|p| assert_eq!(p, [0, 0, 136, 255]));
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let bus = pipeline.get_bus().unwrap();
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Error(err) => {
eprintln!(
"Error received from element {:?}: {}",
err.get_src().map(|s| s.get_path_string()),
err.get_error()
);
eprintln!("Debugging information: {:?}", err.get_debug());
unreachable!();
}
MessageView::Eos(..) => break,
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
|
{
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstcdg::plugin_register_static().expect("cdgdec tests");
});
}
|
identifier_body
|
a.rs
|
/*
* Copyright (C) 2015 Benjamin Fry <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//! Parser for A text form
use std::net::Ipv4Addr;
use std::str::FromStr;
use crate::error::*;
/// Parse the RData from a set of Tokens
pub fn parse<'i, I: Iterator<Item = &'i str>>(mut tokens: I) -> ParseResult<Ipv4Addr>
|
{
let address: Ipv4Addr = tokens
.next()
.ok_or_else(|| ParseError::from(ParseErrorKind::MissingToken("ipv4 address".to_string())))
.and_then(|s| Ipv4Addr::from_str(s).map_err(Into::into))?;
Ok(address)
}
|
identifier_body
|
|
a.rs
|
/*
* Copyright (C) 2015 Benjamin Fry <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
|
use std::net::Ipv4Addr;
use std::str::FromStr;
use crate::error::*;
/// Parse the RData from a set of Tokens
pub fn parse<'i, I: Iterator<Item = &'i str>>(mut tokens: I) -> ParseResult<Ipv4Addr> {
let address: Ipv4Addr = tokens
.next()
.ok_or_else(|| ParseError::from(ParseErrorKind::MissingToken("ipv4 address".to_string())))
.and_then(|s| Ipv4Addr::from_str(s).map_err(Into::into))?;
Ok(address)
}
|
//! Parser for A text form
|
random_line_split
|
a.rs
|
/*
* Copyright (C) 2015 Benjamin Fry <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//! Parser for A text form
use std::net::Ipv4Addr;
use std::str::FromStr;
use crate::error::*;
/// Parse the RData from a set of Tokens
pub fn
|
<'i, I: Iterator<Item = &'i str>>(mut tokens: I) -> ParseResult<Ipv4Addr> {
let address: Ipv4Addr = tokens
.next()
.ok_or_else(|| ParseError::from(ParseErrorKind::MissingToken("ipv4 address".to_string())))
.and_then(|s| Ipv4Addr::from_str(s).map_err(Into::into))?;
Ok(address)
}
|
parse
|
identifier_name
|
lower_caser.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use std::mem;
impl TokenFilter for LowerCaser {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
BoxTokenStream::from(LowerCaserTokenStream {
tail: token_stream,
buffer: String::with_capacity(100),
})
}
}
/// Token filter that lowercases terms.
#[derive(Clone)]
pub struct LowerCaser;
pub struct LowerCaserTokenStream<'a> {
buffer: String,
tail: BoxTokenStream<'a>,
}
// writes a lowercased version of text into output.
fn to_lowercase_unicode(text: &mut String, output: &mut String) {
output.clear();
for c in text.chars() {
        // Contrary to the std, we do not take care of the sigma special case.
        // This will have a normalization effect, which is ok for search.
output.extend(c.to_lowercase());
}
}
impl<'a> TokenStream for LowerCaserTokenStream<'a> {
fn
|
(&mut self) -> bool {
        if !self.tail.advance() {
return false;
}
if self.token_mut().text.is_ascii() {
// fast track for ascii.
self.token_mut().text.make_ascii_lowercase();
} else {
to_lowercase_unicode(&mut self.tail.token_mut().text, &mut self.buffer);
mem::swap(&mut self.tail.token_mut().text, &mut self.buffer);
}
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
#[cfg(test)]
mod tests {
use crate::tokenizer::tests::assert_token;
use crate::tokenizer::{LowerCaser, SimpleTokenizer, TextAnalyzer, Token};
#[test]
fn test_to_lower_case() {
let tokens = token_stream_helper("Tree");
assert_eq!(tokens.len(), 1);
assert_token(&tokens[0], 0, "tree", 0, 4);
let tokens = token_stream_helper("Русский текст");
assert_eq!(tokens.len(), 2);
assert_token(&tokens[0], 0, "русский", 0, 14);
assert_token(&tokens[1], 1, "текст", 15, 25);
}
fn token_stream_helper(text: &str) -> Vec<Token> {
let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
.filter(LowerCaser)
.token_stream(text);
let mut tokens = vec![];
let mut add_token = |token: &Token| {
tokens.push(token.clone());
};
token_stream.process(&mut add_token);
tokens
}
}
|
advance
|
identifier_name
|
lower_caser.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use std::mem;
impl TokenFilter for LowerCaser {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
BoxTokenStream::from(LowerCaserTokenStream {
tail: token_stream,
buffer: String::with_capacity(100),
})
}
}
/// Token filter that lowercases terms.
#[derive(Clone)]
pub struct LowerCaser;
pub struct LowerCaserTokenStream<'a> {
buffer: String,
tail: BoxTokenStream<'a>,
}
// writes a lowercased version of text into output.
fn to_lowercase_unicode(text: &mut String, output: &mut String) {
output.clear();
for c in text.chars() {
        // Contrary to the std, we do not take care of the sigma special case.
        // This will have a normalization effect, which is ok for search.
output.extend(c.to_lowercase());
}
}
impl<'a> TokenStream for LowerCaserTokenStream<'a> {
fn advance(&mut self) -> bool {
        if !self.tail.advance() {
return false;
}
if self.token_mut().text.is_ascii() {
// fast track for ascii.
self.token_mut().text.make_ascii_lowercase();
} else {
|
}
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
#[cfg(test)]
mod tests {
use crate::tokenizer::tests::assert_token;
use crate::tokenizer::{LowerCaser, SimpleTokenizer, TextAnalyzer, Token};
#[test]
fn test_to_lower_case() {
let tokens = token_stream_helper("Tree");
assert_eq!(tokens.len(), 1);
assert_token(&tokens[0], 0, "tree", 0, 4);
let tokens = token_stream_helper("Русский текст");
assert_eq!(tokens.len(), 2);
assert_token(&tokens[0], 0, "русский", 0, 14);
assert_token(&tokens[1], 1, "текст", 15, 25);
}
fn token_stream_helper(text: &str) -> Vec<Token> {
let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
.filter(LowerCaser)
.token_stream(text);
let mut tokens = vec![];
let mut add_token = |token: &Token| {
tokens.push(token.clone());
};
token_stream.process(&mut add_token);
tokens
}
}
|
to_lowercase_unicode(&mut self.tail.token_mut().text, &mut self.buffer);
mem::swap(&mut self.tail.token_mut().text, &mut self.buffer);
|
random_line_split
|
lower_caser.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use std::mem;
impl TokenFilter for LowerCaser {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
BoxTokenStream::from(LowerCaserTokenStream {
tail: token_stream,
buffer: String::with_capacity(100),
})
}
}
/// Token filter that lowercases terms.
#[derive(Clone)]
pub struct LowerCaser;
pub struct LowerCaserTokenStream<'a> {
buffer: String,
tail: BoxTokenStream<'a>,
}
// writes a lowercased version of text into output.
fn to_lowercase_unicode(text: &mut String, output: &mut String) {
output.clear();
for c in text.chars() {
        // Contrary to the std, we do not take care of the sigma special case.
        // This will have a normalization effect, which is ok for search.
output.extend(c.to_lowercase());
}
}
impl<'a> TokenStream for LowerCaserTokenStream<'a> {
fn advance(&mut self) -> bool {
        if !self.tail.advance() {
return false;
}
if self.token_mut().text.is_ascii() {
// fast track for ascii.
self.token_mut().text.make_ascii_lowercase();
} else {
to_lowercase_unicode(&mut self.tail.token_mut().text, &mut self.buffer);
mem::swap(&mut self.tail.token_mut().text, &mut self.buffer);
}
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token
|
}
#[cfg(test)]
mod tests {
use crate::tokenizer::tests::assert_token;
use crate::tokenizer::{LowerCaser, SimpleTokenizer, TextAnalyzer, Token};
#[test]
fn test_to_lower_case() {
let tokens = token_stream_helper("Tree");
assert_eq!(tokens.len(), 1);
assert_token(&tokens[0], 0, "tree", 0, 4);
let tokens = token_stream_helper("Русский текст");
assert_eq!(tokens.len(), 2);
assert_token(&tokens[0], 0, "русский", 0, 14);
assert_token(&tokens[1], 1, "текст", 15, 25);
}
fn token_stream_helper(text: &str) -> Vec<Token> {
let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
.filter(LowerCaser)
.token_stream(text);
let mut tokens = vec![];
let mut add_token = |token: &Token| {
tokens.push(token.clone());
};
token_stream.process(&mut add_token);
tokens
}
}
|
{
self.tail.token_mut()
}
|
identifier_body
|
lower_caser.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use std::mem;
impl TokenFilter for LowerCaser {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
BoxTokenStream::from(LowerCaserTokenStream {
tail: token_stream,
buffer: String::with_capacity(100),
})
}
}
/// Token filter that lowercases terms.
#[derive(Clone)]
pub struct LowerCaser;
pub struct LowerCaserTokenStream<'a> {
buffer: String,
tail: BoxTokenStream<'a>,
}
// writes a lowercased version of text into output.
fn to_lowercase_unicode(text: &mut String, output: &mut String) {
output.clear();
for c in text.chars() {
        // Contrary to the std, we do not take care of the sigma special case.
        // This will have a normalization effect, which is ok for search.
output.extend(c.to_lowercase());
}
}
impl<'a> TokenStream for LowerCaserTokenStream<'a> {
fn advance(&mut self) -> bool {
        if !self.tail.advance() {
return false;
}
if self.token_mut().text.is_ascii() {
// fast track for ascii.
self.token_mut().text.make_ascii_lowercase();
} else
|
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
#[cfg(test)]
mod tests {
use crate::tokenizer::tests::assert_token;
use crate::tokenizer::{LowerCaser, SimpleTokenizer, TextAnalyzer, Token};
#[test]
fn test_to_lower_case() {
let tokens = token_stream_helper("Tree");
assert_eq!(tokens.len(), 1);
assert_token(&tokens[0], 0, "tree", 0, 4);
let tokens = token_stream_helper("Русский текст");
assert_eq!(tokens.len(), 2);
assert_token(&tokens[0], 0, "русский", 0, 14);
assert_token(&tokens[1], 1, "текст", 15, 25);
}
fn token_stream_helper(text: &str) -> Vec<Token> {
let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
.filter(LowerCaser)
.token_stream(text);
let mut tokens = vec![];
let mut add_token = |token: &Token| {
tokens.push(token.clone());
};
token_stream.process(&mut add_token);
tokens
}
}
|
{
to_lowercase_unicode(&mut self.tail.token_mut().text, &mut self.buffer);
mem::swap(&mut self.tail.token_mut().text, &mut self.buffer);
}
|
conditional_block
|
simple_executor.rs
|
use crate::task::Task;
use alloc::collections::VecDeque;
use core::task::{RawWaker, RawWakerVTable, Waker};
pub struct SimpleExecutor {
task_queue: VecDeque<Task>,
|
task_queue: VecDeque::new(),
}
}
pub fn spawn(&mut self, task: Task) { self.task_queue.push_back(task) }
}
fn dummy_raw_waker() -> RawWaker {
fn no_op(_: *const ()) {}
fn clone(_: *const ()) -> RawWaker { dummy_raw_waker() }
let vtable = &RawWakerVTable::new(clone, no_op, no_op, no_op);
RawWaker::new(0 as *const (), vtable)
}
fn dummy_waker() -> Waker { unsafe { Waker::from_raw(dummy_raw_waker()) } }
|
}
impl SimpleExecutor {
pub fn new() -> SimpleExecutor {
SimpleExecutor {
|
random_line_split
|
simple_executor.rs
|
use crate::task::Task;
use alloc::collections::VecDeque;
use core::task::{RawWaker, RawWakerVTable, Waker};
pub struct SimpleExecutor {
task_queue: VecDeque<Task>,
}
impl SimpleExecutor {
pub fn new() -> SimpleExecutor {
SimpleExecutor {
task_queue: VecDeque::new(),
}
}
pub fn spawn(&mut self, task: Task) { self.task_queue.push_back(task) }
}
fn
|
() -> RawWaker {
fn no_op(_: *const ()) {}
fn clone(_: *const ()) -> RawWaker { dummy_raw_waker() }
let vtable = &RawWakerVTable::new(clone, no_op, no_op, no_op);
RawWaker::new(0 as *const (), vtable)
}
fn dummy_waker() -> Waker { unsafe { Waker::from_raw(dummy_raw_waker()) } }
|
dummy_raw_waker
|
identifier_name
|
simple_executor.rs
|
use crate::task::Task;
use alloc::collections::VecDeque;
use core::task::{RawWaker, RawWakerVTable, Waker};
pub struct SimpleExecutor {
task_queue: VecDeque<Task>,
}
impl SimpleExecutor {
pub fn new() -> SimpleExecutor
|
pub fn spawn(&mut self, task: Task) { self.task_queue.push_back(task) }
}
fn dummy_raw_waker() -> RawWaker {
fn no_op(_: *const ()) {}
fn clone(_: *const ()) -> RawWaker { dummy_raw_waker() }
let vtable = &RawWakerVTable::new(clone, no_op, no_op, no_op);
RawWaker::new(0 as *const (), vtable)
}
fn dummy_waker() -> Waker { unsafe { Waker::from_raw(dummy_raw_waker()) } }
|
{
SimpleExecutor {
task_queue: VecDeque::new(),
}
}
|
identifier_body
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// TODO we're going to allocate a whole bunch of temp Strings, is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate syntex_syntax as syntax;
extern crate rustc_serialize;
extern crate strings;
extern crate unicode_segmentation;
extern crate regex;
extern crate diff;
extern crate term;
extern crate itertools;
extern crate multimap;
use syntax::ast;
use syntax::codemap::{mk_sp, CodeMap, Span};
use syntax::errors::{Handler, DiagnosticBuilder};
use syntax::errors::emitter::{ColorConfig, EmitterWriter};
use syntax::parse::{self, ParseSess};
use strings::string_buffer::StringBuffer;
use std::io::{self, stdout, Write};
use std::ops::{Add, Sub};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use issues::{BadIssueSeeker, Issue};
use filemap::FileMap;
use visitor::FmtVisitor;
use config::Config;
use checkstyle::{output_header, output_footer};
pub use self::summary::Summary;
#[macro_use]
mod utils;
pub mod config;
pub mod codemap;
pub mod filemap;
pub mod file_lines;
pub mod visitor;
mod checkstyle;
mod items;
mod missed_spans;
mod lists;
mod types;
mod expr;
mod imports;
mod issues;
mod rewrite;
mod string;
mod comment;
pub mod modules;
pub mod rustfmt_diff;
mod chains;
mod macros;
mod patterns;
mod summary;
const MIN_STRING: usize = 10;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
pub trait Spanned {
fn span(&self) -> Span;
}
impl Spanned for ast::Expr {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Pat {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Ty {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Arg {
fn span(&self) -> Span {
if items::is_named_arg(self) {
mk_sp(self.pat.span.lo, self.ty.span.hi)
} else {
self.ty.span
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct Indent {
// Width of the block indent, in characters. Must be a multiple of
// Config::tab_spaces.
pub block_indent: usize,
// Alignment in characters.
pub alignment: usize,
}
impl Indent {
pub fn new(block_indent: usize, alignment: usize) -> Indent {
Indent {
block_indent: block_indent,
alignment: alignment,
}
}
pub fn empty() -> Indent {
Indent::new(0, 0)
}
pub fn block_indent(mut self, config: &Config) -> Indent {
self.block_indent += config.tab_spaces;
self
}
pub fn block_unindent(mut self, config: &Config) -> Indent {
self.block_indent -= config.tab_spaces;
self
}
pub fn width(&self) -> usize {
self.block_indent + self.alignment
}
pub fn to_string(&self, config: &Config) -> String {
let (num_tabs, num_spaces) = if config.hard_tabs {
(self.block_indent / config.tab_spaces, self.alignment)
} else {
(0, self.block_indent + self.alignment)
};
let num_chars = num_tabs + num_spaces;
let mut indent = String::with_capacity(num_chars);
for _ in 0..num_tabs {
indent.push('\t')
}
for _ in 0..num_spaces {
indent.push(' ')
}
indent
}
}
impl Add for Indent {
type Output = Indent;
fn add(self, rhs: Indent) -> Indent {
Indent {
block_indent: self.block_indent + rhs.block_indent,
alignment: self.alignment + rhs.alignment,
}
}
}
impl Sub for Indent {
type Output = Indent;
fn sub(self, rhs: Indent) -> Indent {
Indent::new(self.block_indent - rhs.block_indent,
self.alignment - rhs.alignment)
}
}
impl Add<usize> for Indent {
type Output = Indent;
fn add(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment + rhs)
}
}
impl Sub<usize> for Indent {
type Output = Indent;
fn sub(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment - rhs)
}
}
pub enum ErrorKind {
// Line has exceeded character limit
LineOverflow,
// Line ends in whitespace
TrailingWhitespace,
// TO-DO or FIX-ME item without an issue number
BadIssue(Issue),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ErrorKind::LineOverflow => write!(fmt, "line exceeded maximum length"),
ErrorKind::TrailingWhitespace => write!(fmt, "left behind trailing whitespace"),
ErrorKind::BadIssue(issue) => write!(fmt, "found {}", issue),
}
}
}
// Formatting errors that are identified *after* rustfmt has run.
pub struct FormattingError {
line: u32,
kind: ErrorKind,
}
impl FormattingError {
fn msg_prefix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "Rustfmt failed at",
ErrorKind::BadIssue(_) => "WARNING:",
}
}
fn msg_suffix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "(sorry)",
ErrorKind::BadIssue(_) => "",
}
}
}
pub struct FormatReport {
// Maps stringified file paths to their associated formatting errors.
file_error_map: HashMap<String, Vec<FormattingError>>,
}
impl FormatReport {
fn new() -> FormatReport {
FormatReport { file_error_map: HashMap::new() }
}
pub fn warning_count(&self) -> usize {
self.file_error_map.iter().map(|(_, ref errors)| errors.len()).fold(0, |acc, x| acc + x)
}
pub fn has_warnings(&self) -> bool {
self.warning_count() > 0
}
}
impl fmt::Display for FormatReport {
// Prints all the formatting errors.
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
for (file, errors) in &self.file_error_map {
for error in errors {
try!(write!(fmt,
"{} {}:{}: {} {}\n",
error.msg_prefix(),
file,
error.line,
error.kind,
error.msg_suffix()));
}
}
Ok(())
}
}
// Formatting which depends on the AST.
fn format_ast<F>(krate: &ast::Crate,
parse_session: &ParseSess,
main_file: &Path,
config: &Config,
mut after_file: F)
-> Result<(FileMap, bool), io::Error>
where F: FnMut(&str, &mut StringBuffer) -> Result<bool, io::Error>
{
let mut result = FileMap::new();
// diff mode: check if any files are differing
let mut has_diff = false;
// We always skip children for the "Plain" write mode, since there is
// nothing to distinguish the nested module contents.
let skip_children = config.skip_children || config.write_mode == config::WriteMode::Plain;
for (path, module) in modules::list_files(krate, parse_session.codemap()) {
        if skip_children && path.as_path() != main_file {
continue;
}
let path = path.to_str().unwrap();
if config.verbose {
println!("Formatting {}", path);
}
let mut visitor = FmtVisitor::from_codemap(parse_session, config);
visitor.format_separate_mod(module);
has_diff |= try!(after_file(path, &mut visitor.buffer));
result.push((path.to_owned(), visitor.buffer));
}
Ok((result, has_diff))
}
// Formatting done on a char by char or line by line basis.
// FIXME(#209) warn on bad license
// FIXME(#20) other stuff for parity with make tidy
fn format_lines(text: &mut StringBuffer, name: &str, config: &Config, report: &mut FormatReport) {
// Iterate over the chars in the file map.
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
let mut errors = vec![];
let mut issue_seeker = BadIssueSeeker::new(config.report_todo, config.report_fixme);
for (c, b) in text.chars() {
if c == '\r' {
line_len += c.len_utf8();
continue;
}
// Add warnings for bad todos/ fixmes
if let Some(issue) = issue_seeker.inspect(c) {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::BadIssue(issue),
});
}
if c == '\n' {
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > config.max_width {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::LineOverflow,
});
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += c.len_utf8();
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {}", text.len, newline_count);
let line = text.len - newline_count + 1;
text.truncate(line);
}
for &(l, _, _) in &trims {
errors.push(FormattingError {
line: l,
kind: ErrorKind::TrailingWhitespace,
});
}
report.file_error_map.insert(name.to_owned(), errors);
}
fn parse_input(input: Input,
parse_session: &ParseSess)
-> Result<ast::Crate, Option<DiagnosticBuilder>> {
let result = match input {
Input::File(file) => parse::parse_crate_from_file(&file, Vec::new(), &parse_session),
Input::Text(text) => {
parse::parse_crate_from_source_str("stdin".to_owned(), text, Vec::new(), &parse_session)
}
};
match result {
Ok(c) => {
if parse_session.span_diagnostic.has_errors() {
// Bail out if the parser recovered from an error.
Err(None)
} else {
Ok(c)
}
}
Err(e) => Err(Some(e)),
}
}
pub fn format_input<T: Write>(input: Input,
config: &Config,
mut out: Option<&mut T>)
-> Result<(Summary, FileMap, FormatReport), (io::Error, Summary)> {
let mut summary = Summary::new();
let codemap = Rc::new(CodeMap::new());
let tty_handler =
Handler::with_tty_emitter(ColorConfig::Auto, None, true, false, codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
|
let krate = match parse_input(input, &parse_session) {
Ok(krate) => krate,
Err(diagnostic) => {
if let Some(mut diagnostic) = diagnostic {
diagnostic.emit();
}
summary.add_parsing_error();
return Ok((summary, FileMap::new(), FormatReport::new()));
}
};
if parse_session.span_diagnostic.has_errors() {
summary.add_parsing_error();
}
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
let mut report = FormatReport::new();
match format_ast(&krate,
&parse_session,
&main_file,
config,
|file_name, file| {
// For some reason, the codemap does not include terminating
// newlines so we must add one on for each file. This is sad.
filemap::append_newline(file);
format_lines(file, file_name, config, &mut report);
if let Some(ref mut out) = out {
return filemap::write_file(file, file_name, out, config);
}
Ok(false)
}) {
Ok((file_map, has_diff)) => {
if report.has_warnings() {
summary.add_formatting_error();
}
if has_diff {
summary.add_diff();
}
Ok((summary, file_map, report))
}
Err(e) => Err((e, summary)),
}
}
#[derive(Debug)]
pub enum Input {
File(PathBuf),
Text(String),
}
pub fn run(input: Input, config: &Config) -> Summary {
let mut out = &mut stdout();
output_header(out, config.write_mode).ok();
match format_input(input, config, Some(out)) {
Ok((summary, _, report)) => {
output_footer(out, config.write_mode).ok();
if report.has_warnings() {
msg!("{}", report);
}
summary
}
Err((msg, mut summary)) => {
msg!("Error writing files: {}", msg);
summary.add_operational_error();
summary
}
}
}
|
let main_file = match input {
Input::File(ref file) => file.clone(),
Input::Text(..) => PathBuf::from("stdin"),
};
|
random_line_split
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// TODO we're going to allocate a whole bunch of temp Strings, is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate syntex_syntax as syntax;
extern crate rustc_serialize;
extern crate strings;
extern crate unicode_segmentation;
extern crate regex;
extern crate diff;
extern crate term;
extern crate itertools;
extern crate multimap;
use syntax::ast;
use syntax::codemap::{mk_sp, CodeMap, Span};
use syntax::errors::{Handler, DiagnosticBuilder};
use syntax::errors::emitter::{ColorConfig, EmitterWriter};
use syntax::parse::{self, ParseSess};
use strings::string_buffer::StringBuffer;
use std::io::{self, stdout, Write};
use std::ops::{Add, Sub};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use issues::{BadIssueSeeker, Issue};
use filemap::FileMap;
use visitor::FmtVisitor;
use config::Config;
use checkstyle::{output_header, output_footer};
pub use self::summary::Summary;
#[macro_use]
mod utils;
pub mod config;
pub mod codemap;
pub mod filemap;
pub mod file_lines;
pub mod visitor;
mod checkstyle;
mod items;
mod missed_spans;
mod lists;
mod types;
mod expr;
mod imports;
mod issues;
mod rewrite;
mod string;
mod comment;
pub mod modules;
pub mod rustfmt_diff;
mod chains;
mod macros;
mod patterns;
mod summary;
const MIN_STRING: usize = 10;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
pub trait Spanned {
fn span(&self) -> Span;
}
impl Spanned for ast::Expr {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Pat {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Ty {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Arg {
fn span(&self) -> Span {
if items::is_named_arg(self) {
mk_sp(self.pat.span.lo, self.ty.span.hi)
} else {
self.ty.span
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct Indent {
// Width of the block indent, in characters. Must be a multiple of
// Config::tab_spaces.
pub block_indent: usize,
// Alignment in characters.
pub alignment: usize,
}
impl Indent {
pub fn new(block_indent: usize, alignment: usize) -> Indent {
Indent {
block_indent: block_indent,
alignment: alignment,
}
}
pub fn empty() -> Indent {
Indent::new(0, 0)
}
pub fn block_indent(mut self, config: &Config) -> Indent {
self.block_indent += config.tab_spaces;
self
}
pub fn block_unindent(mut self, config: &Config) -> Indent {
self.block_indent -= config.tab_spaces;
self
}
pub fn width(&self) -> usize {
self.block_indent + self.alignment
}
pub fn to_string(&self, config: &Config) -> String {
let (num_tabs, num_spaces) = if config.hard_tabs {
(self.block_indent / config.tab_spaces, self.alignment)
} else {
(0, self.block_indent + self.alignment)
};
let num_chars = num_tabs + num_spaces;
let mut indent = String::with_capacity(num_chars);
for _ in 0..num_tabs {
indent.push('\t')
}
for _ in 0..num_spaces {
indent.push(' ')
}
indent
}
}
impl Add for Indent {
type Output = Indent;
fn add(self, rhs: Indent) -> Indent {
Indent {
block_indent: self.block_indent + rhs.block_indent,
alignment: self.alignment + rhs.alignment,
}
}
}
impl Sub for Indent {
type Output = Indent;
fn sub(self, rhs: Indent) -> Indent {
Indent::new(self.block_indent - rhs.block_indent,
self.alignment - rhs.alignment)
}
}
impl Add<usize> for Indent {
type Output = Indent;
fn add(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment + rhs)
}
}
impl Sub<usize> for Indent {
type Output = Indent;
fn sub(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment - rhs)
}
}
pub enum ErrorKind {
// Line has exceeded character limit
LineOverflow,
// Line ends in whitespace
TrailingWhitespace,
// TO-DO or FIX-ME item without an issue number
BadIssue(Issue),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ErrorKind::LineOverflow => write!(fmt, "line exceeded maximum length"),
ErrorKind::TrailingWhitespace => write!(fmt, "left behind trailing whitespace"),
ErrorKind::BadIssue(issue) => write!(fmt, "found {}", issue),
}
}
}
// Formatting errors that are identified *after* rustfmt has run.
pub struct FormattingError {
line: u32,
kind: ErrorKind,
}
impl FormattingError {
fn msg_prefix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "Rustfmt failed at",
ErrorKind::BadIssue(_) => "WARNING:",
}
}
fn msg_suffix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "(sorry)",
ErrorKind::BadIssue(_) => "",
}
}
}
pub struct FormatReport {
// Maps stringified file paths to their associated formatting errors.
file_error_map: HashMap<String, Vec<FormattingError>>,
}
impl FormatReport {
fn new() -> FormatReport {
FormatReport { file_error_map: HashMap::new() }
}
pub fn warning_count(&self) -> usize {
self.file_error_map.iter().map(|(_, ref errors)| errors.len()).fold(0, |acc, x| acc + x)
}
pub fn
|
(&self) -> bool {
self.warning_count() > 0
}
}
impl fmt::Display for FormatReport {
// Prints all the formatting errors.
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
for (file, errors) in &self.file_error_map {
for error in errors {
try!(write!(fmt,
"{} {}:{}: {} {}\n",
error.msg_prefix(),
file,
error.line,
error.kind,
error.msg_suffix()));
}
}
Ok(())
}
}
// Formatting which depends on the AST.
fn format_ast<F>(krate: &ast::Crate,
parse_session: &ParseSess,
main_file: &Path,
config: &Config,
mut after_file: F)
-> Result<(FileMap, bool), io::Error>
where F: FnMut(&str, &mut StringBuffer) -> Result<bool, io::Error>
{
let mut result = FileMap::new();
// diff mode: check if any files are differing
let mut has_diff = false;
// We always skip children for the "Plain" write mode, since there is
// nothing to distinguish the nested module contents.
let skip_children = config.skip_children || config.write_mode == config::WriteMode::Plain;
for (path, module) in modules::list_files(krate, parse_session.codemap()) {
        if skip_children && path.as_path() != main_file {
continue;
}
let path = path.to_str().unwrap();
if config.verbose {
println!("Formatting {}", path);
}
let mut visitor = FmtVisitor::from_codemap(parse_session, config);
visitor.format_separate_mod(module);
has_diff |= try!(after_file(path, &mut visitor.buffer));
result.push((path.to_owned(), visitor.buffer));
}
Ok((result, has_diff))
}
// Formatting done on a char by char or line by line basis.
// FIXME(#209) warn on bad license
// FIXME(#20) other stuff for parity with make tidy
fn format_lines(text: &mut StringBuffer, name: &str, config: &Config, report: &mut FormatReport) {
// Iterate over the chars in the file map.
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
let mut errors = vec![];
let mut issue_seeker = BadIssueSeeker::new(config.report_todo, config.report_fixme);
for (c, b) in text.chars() {
if c == '\r' {
line_len += c.len_utf8();
continue;
}
// Add warnings for bad todos/ fixmes
if let Some(issue) = issue_seeker.inspect(c) {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::BadIssue(issue),
});
}
if c == '\n' {
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > config.max_width {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::LineOverflow,
});
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += c.len_utf8();
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {}", text.len, newline_count);
let line = text.len - newline_count + 1;
text.truncate(line);
}
for &(l, _, _) in &trims {
errors.push(FormattingError {
line: l,
kind: ErrorKind::TrailingWhitespace,
});
}
report.file_error_map.insert(name.to_owned(), errors);
}
fn parse_input(input: Input,
parse_session: &ParseSess)
-> Result<ast::Crate, Option<DiagnosticBuilder>> {
let result = match input {
Input::File(file) => parse::parse_crate_from_file(&file, Vec::new(), &parse_session),
Input::Text(text) => {
parse::parse_crate_from_source_str("stdin".to_owned(), text, Vec::new(), &parse_session)
}
};
match result {
Ok(c) => {
if parse_session.span_diagnostic.has_errors() {
// Bail out if the parser recovered from an error.
Err(None)
} else {
Ok(c)
}
}
Err(e) => Err(Some(e)),
}
}
pub fn format_input<T: Write>(input: Input,
config: &Config,
mut out: Option<&mut T>)
-> Result<(Summary, FileMap, FormatReport), (io::Error, Summary)> {
let mut summary = Summary::new();
let codemap = Rc::new(CodeMap::new());
let tty_handler =
Handler::with_tty_emitter(ColorConfig::Auto, None, true, false, codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let main_file = match input {
Input::File(ref file) => file.clone(),
Input::Text(..) => PathBuf::from("stdin"),
};
let krate = match parse_input(input, &parse_session) {
Ok(krate) => krate,
Err(diagnostic) => {
if let Some(mut diagnostic) = diagnostic {
diagnostic.emit();
}
summary.add_parsing_error();
return Ok((summary, FileMap::new(), FormatReport::new()));
}
};
if parse_session.span_diagnostic.has_errors() {
summary.add_parsing_error();
}
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
let mut report = FormatReport::new();
match format_ast(&krate,
&parse_session,
&main_file,
config,
|file_name, file| {
// For some reason, the codemap does not include terminating
// newlines so we must add one on for each file. This is sad.
filemap::append_newline(file);
format_lines(file, file_name, config, &mut report);
if let Some(ref mut out) = out {
return filemap::write_file(file, file_name, out, config);
}
Ok(false)
}) {
Ok((file_map, has_diff)) => {
if report.has_warnings() {
summary.add_formatting_error();
}
if has_diff {
summary.add_diff();
}
Ok((summary, file_map, report))
}
Err(e) => Err((e, summary)),
}
}
#[derive(Debug)]
pub enum Input {
File(PathBuf),
Text(String),
}
pub fn run(input: Input, config: &Config) -> Summary {
let mut out = &mut stdout();
output_header(out, config.write_mode).ok();
match format_input(input, config, Some(out)) {
Ok((summary, _, report)) => {
output_footer(out, config.write_mode).ok();
if report.has_warnings() {
msg!("{}", report);
}
summary
}
Err((msg, mut summary)) => {
msg!("Error writing files: {}", msg);
summary.add_operational_error();
summary
}
}
}
|
has_warnings
|
identifier_name
|
lib.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// TODO we're going to allocate a whole bunch of temp Strings, is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate syntex_syntax as syntax;
extern crate rustc_serialize;
extern crate strings;
extern crate unicode_segmentation;
extern crate regex;
extern crate diff;
extern crate term;
extern crate itertools;
extern crate multimap;
use syntax::ast;
use syntax::codemap::{mk_sp, CodeMap, Span};
use syntax::errors::{Handler, DiagnosticBuilder};
use syntax::errors::emitter::{ColorConfig, EmitterWriter};
use syntax::parse::{self, ParseSess};
use strings::string_buffer::StringBuffer;
use std::io::{self, stdout, Write};
use std::ops::{Add, Sub};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use issues::{BadIssueSeeker, Issue};
use filemap::FileMap;
use visitor::FmtVisitor;
use config::Config;
use checkstyle::{output_header, output_footer};
pub use self::summary::Summary;
#[macro_use]
mod utils;
pub mod config;
pub mod codemap;
pub mod filemap;
pub mod file_lines;
pub mod visitor;
mod checkstyle;
mod items;
mod missed_spans;
mod lists;
mod types;
mod expr;
mod imports;
mod issues;
mod rewrite;
mod string;
mod comment;
pub mod modules;
pub mod rustfmt_diff;
mod chains;
mod macros;
mod patterns;
mod summary;
const MIN_STRING: usize = 10;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
pub trait Spanned {
fn span(&self) -> Span;
}
impl Spanned for ast::Expr {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Pat {
fn span(&self) -> Span
|
}
impl Spanned for ast::Ty {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Arg {
fn span(&self) -> Span {
if items::is_named_arg(self) {
mk_sp(self.pat.span.lo, self.ty.span.hi)
} else {
self.ty.span
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct Indent {
// Width of the block indent, in characters. Must be a multiple of
// Config::tab_spaces.
pub block_indent: usize,
// Alignment in characters.
pub alignment: usize,
}
impl Indent {
pub fn new(block_indent: usize, alignment: usize) -> Indent {
Indent {
block_indent: block_indent,
alignment: alignment,
}
}
pub fn empty() -> Indent {
Indent::new(0, 0)
}
pub fn block_indent(mut self, config: &Config) -> Indent {
self.block_indent += config.tab_spaces;
self
}
pub fn block_unindent(mut self, config: &Config) -> Indent {
self.block_indent -= config.tab_spaces;
self
}
pub fn width(&self) -> usize {
self.block_indent + self.alignment
}
pub fn to_string(&self, config: &Config) -> String {
let (num_tabs, num_spaces) = if config.hard_tabs {
(self.block_indent / config.tab_spaces, self.alignment)
} else {
(0, self.block_indent + self.alignment)
};
let num_chars = num_tabs + num_spaces;
let mut indent = String::with_capacity(num_chars);
for _ in 0..num_tabs {
indent.push('\t')
}
for _ in 0..num_spaces {
indent.push(' ')
}
indent
}
}
impl Add for Indent {
type Output = Indent;
fn add(self, rhs: Indent) -> Indent {
Indent {
block_indent: self.block_indent + rhs.block_indent,
alignment: self.alignment + rhs.alignment,
}
}
}
impl Sub for Indent {
type Output = Indent;
fn sub(self, rhs: Indent) -> Indent {
Indent::new(self.block_indent - rhs.block_indent,
self.alignment - rhs.alignment)
}
}
impl Add<usize> for Indent {
type Output = Indent;
fn add(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment + rhs)
}
}
impl Sub<usize> for Indent {
type Output = Indent;
fn sub(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment - rhs)
}
}
pub enum ErrorKind {
// Line has exceeded character limit
LineOverflow,
// Line ends in whitespace
TrailingWhitespace,
// TO-DO or FIX-ME item without an issue number
BadIssue(Issue),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ErrorKind::LineOverflow => write!(fmt, "line exceeded maximum length"),
ErrorKind::TrailingWhitespace => write!(fmt, "left behind trailing whitespace"),
ErrorKind::BadIssue(issue) => write!(fmt, "found {}", issue),
}
}
}
// Formatting errors that are identified *after* rustfmt has run.
pub struct FormattingError {
line: u32,
kind: ErrorKind,
}
impl FormattingError {
fn msg_prefix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "Rustfmt failed at",
ErrorKind::BadIssue(_) => "WARNING:",
}
}
fn msg_suffix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "(sorry)",
ErrorKind::BadIssue(_) => "",
}
}
}
pub struct FormatReport {
// Maps stringified file paths to their associated formatting errors.
file_error_map: HashMap<String, Vec<FormattingError>>,
}
impl FormatReport {
fn new() -> FormatReport {
FormatReport { file_error_map: HashMap::new() }
}
pub fn warning_count(&self) -> usize {
self.file_error_map.iter().map(|(_, ref errors)| errors.len()).fold(0, |acc, x| acc + x)
}
pub fn has_warnings(&self) -> bool {
self.warning_count() > 0
}
}
impl fmt::Display for FormatReport {
// Prints all the formatting errors.
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
for (file, errors) in &self.file_error_map {
for error in errors {
try!(write!(fmt,
"{} {}:{}: {} {}\n",
error.msg_prefix(),
file,
error.line,
error.kind,
error.msg_suffix()));
}
}
Ok(())
}
}
// Formatting which depends on the AST.
fn format_ast<F>(krate: &ast::Crate,
parse_session: &ParseSess,
main_file: &Path,
config: &Config,
mut after_file: F)
-> Result<(FileMap, bool), io::Error>
where F: FnMut(&str, &mut StringBuffer) -> Result<bool, io::Error>
{
let mut result = FileMap::new();
// diff mode: check if any files are differing
let mut has_diff = false;
// We always skip children for the "Plain" write mode, since there is
// nothing to distinguish the nested module contents.
let skip_children = config.skip_children || config.write_mode == config::WriteMode::Plain;
for (path, module) in modules::list_files(krate, parse_session.codemap()) {
        if skip_children && path.as_path() != main_file {
continue;
}
let path = path.to_str().unwrap();
if config.verbose {
println!("Formatting {}", path);
}
let mut visitor = FmtVisitor::from_codemap(parse_session, config);
visitor.format_separate_mod(module);
has_diff |= try!(after_file(path, &mut visitor.buffer));
result.push((path.to_owned(), visitor.buffer));
}
Ok((result, has_diff))
}
// Formatting done on a char by char or line by line basis.
// FIXME(#209) warn on bad license
// FIXME(#20) other stuff for parity with make tidy
fn format_lines(text: &mut StringBuffer, name: &str, config: &Config, report: &mut FormatReport) {
// Iterate over the chars in the file map.
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
let mut errors = vec![];
let mut issue_seeker = BadIssueSeeker::new(config.report_todo, config.report_fixme);
for (c, b) in text.chars() {
if c == '\r' {
line_len += c.len_utf8();
continue;
}
// Add warnings for bad todos/ fixmes
if let Some(issue) = issue_seeker.inspect(c) {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::BadIssue(issue),
});
}
if c == '\n' {
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > config.max_width {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::LineOverflow,
});
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += c.len_utf8();
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {}", text.len, newline_count);
let line = text.len - newline_count + 1;
text.truncate(line);
}
for &(l, _, _) in &trims {
errors.push(FormattingError {
line: l,
kind: ErrorKind::TrailingWhitespace,
});
}
report.file_error_map.insert(name.to_owned(), errors);
}
fn parse_input(input: Input,
parse_session: &ParseSess)
-> Result<ast::Crate, Option<DiagnosticBuilder>> {
let result = match input {
Input::File(file) => parse::parse_crate_from_file(&file, Vec::new(), &parse_session),
Input::Text(text) => {
parse::parse_crate_from_source_str("stdin".to_owned(), text, Vec::new(), &parse_session)
}
};
match result {
Ok(c) => {
if parse_session.span_diagnostic.has_errors() {
// Bail out if the parser recovered from an error.
Err(None)
} else {
Ok(c)
}
}
Err(e) => Err(Some(e)),
}
}
pub fn format_input<T: Write>(input: Input,
config: &Config,
mut out: Option<&mut T>)
-> Result<(Summary, FileMap, FormatReport), (io::Error, Summary)> {
let mut summary = Summary::new();
let codemap = Rc::new(CodeMap::new());
let tty_handler =
Handler::with_tty_emitter(ColorConfig::Auto, None, true, false, codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let main_file = match input {
Input::File(ref file) => file.clone(),
Input::Text(..) => PathBuf::from("stdin"),
};
let krate = match parse_input(input, &parse_session) {
Ok(krate) => krate,
Err(diagnostic) => {
if let Some(mut diagnostic) = diagnostic {
diagnostic.emit();
}
summary.add_parsing_error();
return Ok((summary, FileMap::new(), FormatReport::new()));
}
};
if parse_session.span_diagnostic.has_errors() {
summary.add_parsing_error();
}
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
let mut report = FormatReport::new();
match format_ast(&krate,
&parse_session,
&main_file,
config,
|file_name, file| {
// For some reason, the codemap does not include terminating
// newlines so we must add one on for each file. This is sad.
filemap::append_newline(file);
format_lines(file, file_name, config, &mut report);
if let Some(ref mut out) = out {
return filemap::write_file(file, file_name, out, config);
}
Ok(false)
}) {
Ok((file_map, has_diff)) => {
if report.has_warnings() {
summary.add_formatting_error();
}
if has_diff {
summary.add_diff();
}
Ok((summary, file_map, report))
}
Err(e) => Err((e, summary)),
}
}
#[derive(Debug)]
pub enum Input {
File(PathBuf),
Text(String),
}
pub fn run(input: Input, config: &Config) -> Summary {
let mut out = &mut stdout();
output_header(out, config.write_mode).ok();
match format_input(input, config, Some(out)) {
Ok((summary, _, report)) => {
output_footer(out, config.write_mode).ok();
if report.has_warnings() {
msg!("{}", report);
}
summary
}
Err((msg, mut summary)) => {
msg!("Error writing files: {}", msg);
summary.add_operational_error();
summary
}
}
}
|
{
self.span
}
|
identifier_body
|
font.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Implementation of Quartz (CoreGraphics) fonts.
use app_units::Au;
use byteorder::{BigEndian, ByteOrder};
use core_foundation::base::CFIndex;
use core_foundation::data::CFData;
use core_foundation::string::UniChar;
use core_graphics::font::CGGlyph;
use core_graphics::geometry::CGRect;
use core_text::font::CTFont;
use core_text::font_descriptor::kCTFontDefaultOrientation;
use core_text::font_descriptor::{SymbolicTraitAccessors, TraitAccessors};
use crate::font::{
FontHandleMethods, FontMetrics, FontTableMethods, FontTableTag, FractionalPixel,
};
use crate::font::{GPOS, GSUB, KERN};
use crate::platform::font_template::FontTemplateData;
use crate::platform::macos::font_context::FontContextHandle;
use crate::text::glyph::GlyphId;
use servo_atoms::Atom;
use std::ops::Range;
use std::sync::Arc;
use std::{fmt, ptr};
use style::values::computed::font::{FontStretch, FontStyle, FontWeight};
const KERN_PAIR_LEN: usize = 6;
pub struct FontTable {
data: CFData,
}
// assumes 72 points per inch, and 96 px per inch
fn px_to_pt(px: f64) -> f64 {
px / 96. * 72.
}
// assumes 72 points per inch, and 96 px per inch
fn pt_to_px(pt: f64) -> f64 {
pt / 72. * 96.
}
fn au_from_pt(pt: f64) -> Au {
Au::from_f64_px(pt_to_px(pt))
}
impl FontTable {
pub fn wrap(data: CFData) -> FontTable {
FontTable { data: data }
}
}
impl FontTableMethods for FontTable {
fn buffer(&self) -> &[u8] {
self.data.bytes()
}
}
#[derive(Debug)]
pub struct FontHandle {
font_data: Arc<FontTemplateData>,
ctfont: CTFont,
h_kern_subtable: Option<CachedKernTable>,
can_do_fast_shaping: bool,
}
impl FontHandle {
/// Cache all the data needed for basic horizontal kerning. This is used only as a fallback or
/// fast path (when the GPOS table is missing or unnecessary) so it needn't handle every case.
fn find_h_kern_subtable(&self) -> Option<CachedKernTable> {
let font_table = self.table_for_tag(KERN)?;
let mut result = CachedKernTable {
font_table: font_table,
pair_data_range: 0..0,
px_per_font_unit: 0.0,
};
// Look for a subtable with horizontal kerning in format 0.
// https://www.microsoft.com/typography/otspec/kern.htm
const KERN_COVERAGE_HORIZONTAL_FORMAT_0: u16 = 1;
const SUBTABLE_HEADER_LEN: usize = 6;
const FORMAT_0_HEADER_LEN: usize = 8;
{
let table = result.font_table.buffer();
let version = BigEndian::read_u16(table);
            if version != 0 {
return None;
}
let num_subtables = BigEndian::read_u16(&table[2..]);
let mut start = 4;
for _ in 0..num_subtables {
// TODO: Check the subtable version number?
let len = BigEndian::read_u16(&table[start + 2..]) as usize;
let cov = BigEndian::read_u16(&table[start + 4..]);
let end = start + len;
if cov == KERN_COVERAGE_HORIZONTAL_FORMAT_0 {
// Found a matching subtable.
if result.pair_data_range.len() > 0 {
debug!("Found multiple horizontal kern tables. Disable fast path.");
return None;
}
// Read the subtable header.
let subtable_start = start + SUBTABLE_HEADER_LEN;
let n_pairs = BigEndian::read_u16(&table[subtable_start..]) as usize;
let pair_data_start = subtable_start + FORMAT_0_HEADER_LEN;
result.pair_data_range = pair_data_start..end;
                    if result.pair_data_range.len() != n_pairs * KERN_PAIR_LEN {
debug!("Bad data in kern header. Disable fast path.");
return None;
}
let pt_per_font_unit =
self.ctfont.pt_size() as f64 / self.ctfont.units_per_em() as f64;
result.px_per_font_unit = pt_to_px(pt_per_font_unit);
}
start = end;
}
}
if result.pair_data_range.len() > 0 {
Some(result)
} else {
None
}
}
}
struct CachedKernTable {
font_table: FontTable,
pair_data_range: Range<usize>,
px_per_font_unit: f64,
}
impl CachedKernTable {
/// Search for a glyph pair in the kern table and return the corresponding value.
fn binary_search(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> Option<i16> {
let pairs = &self.font_table.buffer()[self.pair_data_range.clone()];
let query = first_glyph << 16 | second_glyph;
let (mut start, mut end) = (0, pairs.len() / KERN_PAIR_LEN);
while start < end {
let i = (start + end) / 2;
let key = BigEndian::read_u32(&pairs[i * KERN_PAIR_LEN..]);
if key > query {
end = i;
} else if key < query {
start = i + 1;
} else {
return Some(BigEndian::read_i16(&pairs[i * KERN_PAIR_LEN + 4..]));
}
}
None
}
}
impl fmt::Debug for CachedKernTable {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "CachedKernTable")
}
}
impl FontHandleMethods for FontHandle {
fn new_from_template(
_fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<FontHandle, ()>
|
None => Err(()),
}
}
fn template(&self) -> Arc<FontTemplateData> {
self.font_data.clone()
}
fn family_name(&self) -> Option<String> {
Some(self.ctfont.family_name())
}
fn face_name(&self) -> Option<String> {
Some(self.ctfont.face_name())
}
fn style(&self) -> FontStyle {
use style::values::generics::font::FontStyle::*;
if self.ctfont.symbolic_traits().is_italic() {
Italic
} else {
Normal
}
}
fn boldness(&self) -> FontWeight {
let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
// TODO(emilio): It may make sense to make this range [.01, 10.0], to
// align with css-fonts-4's range of [1, 1000].
let normalized = if normalized <= 0.0 {
4.0 + normalized * 3.0 // [1.0, 4.0]
} else {
4.0 + normalized * 5.0 // [4.0, 9.0]
}; // [1.0, 9.0], centered on 4.0
FontWeight(normalized as f32 * 100.)
}
fn stretchiness(&self) -> FontStretch {
use style::values::computed::Percentage;
use style::values::generics::NonNegative;
let normalized = self.ctfont.all_traits().normalized_width(); // [-1.0, 1.0]
FontStretch(NonNegative(Percentage(normalized as f32 + 1.0)))
}
fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let characters: [UniChar; 1] = [codepoint as UniChar];
let mut glyphs: [CGGlyph; 1] = [0 as CGGlyph];
let count: CFIndex = 1;
let result = unsafe {
self.ctfont
.get_glyphs_for_characters(&characters[0], &mut glyphs[0], count)
};
        if !result {
// No glyph for this character
return None;
}
assert_ne!(glyphs[0], 0); // FIXME: error handling
return Some(glyphs[0] as GlyphId);
}
fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
if let Some(ref table) = self.h_kern_subtable {
if let Some(font_units) = table.binary_search(first_glyph, second_glyph) {
return font_units as f64 * table.px_per_font_unit;
}
}
0.0
}
fn can_do_fast_shaping(&self) -> bool {
self.can_do_fast_shaping
}
fn glyph_h_advance(&self, glyph: GlyphId) -> Option<FractionalPixel> {
let glyphs = [glyph as CGGlyph];
let advance = unsafe {
self.ctfont.get_advances_for_glyphs(
kCTFontDefaultOrientation,
&glyphs[0],
ptr::null_mut(),
1,
)
};
Some(advance as FractionalPixel)
}
fn metrics(&self) -> FontMetrics {
let bounding_rect: CGRect = self.ctfont.bounding_box();
let ascent = self.ctfont.ascent() as f64;
let descent = self.ctfont.descent() as f64;
let em_size = Au::from_f64_px(self.ctfont.pt_size() as f64);
let leading = self.ctfont.leading() as f64;
let scale = px_to_pt(self.ctfont.pt_size() as f64) / (ascent + descent);
let line_gap = (ascent + descent + leading + 0.5).floor();
let max_advance_width = au_from_pt(bounding_rect.size.width as f64);
let average_advance = self
.glyph_index('0')
.and_then(|idx| self.glyph_h_advance(idx))
.map(Au::from_f64_px)
.unwrap_or(max_advance_width);
let metrics = FontMetrics {
underline_size: au_from_pt(self.ctfont.underline_thickness() as f64),
// TODO(Issue #201): underline metrics are not reliable. Have to pull out of font table
// directly.
//
// see also: https://bugs.webkit.org/show_bug.cgi?id=16768
// see also: https://bugreports.qt-project.org/browse/QTBUG-13364
underline_offset: au_from_pt(self.ctfont.underline_position() as f64),
strikeout_size: Au(0), // FIXME(Issue #942)
strikeout_offset: Au(0), // FIXME(Issue #942)
leading: au_from_pt(leading),
x_height: au_from_pt((self.ctfont.x_height() as f64) * scale),
em_size: em_size,
ascent: au_from_pt(ascent * scale),
descent: au_from_pt(descent * scale),
max_advance: max_advance_width,
average_advance: average_advance,
line_gap: Au::from_f64_px(line_gap),
};
debug!(
"Font metrics (@{} pt): {:?}",
self.ctfont.pt_size() as f64,
metrics
);
metrics
}
fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result: Option<CFData> = self.ctfont.get_font_table(tag);
result.and_then(|data| Some(FontTable::wrap(data)))
}
fn identifier(&self) -> Atom {
self.font_data.identifier.clone()
}
}
|
{
let size = match pt_size {
Some(s) => s.to_f64_px(),
None => 0.0,
};
match template.ctfont(size) {
Some(ref ctfont) => {
let mut handle = FontHandle {
font_data: template.clone(),
ctfont: ctfont.clone_with_font_size(size),
h_kern_subtable: None,
can_do_fast_shaping: false,
};
handle.h_kern_subtable = handle.find_h_kern_subtable();
// TODO (#11310): Implement basic support for GPOS and GSUB.
handle.can_do_fast_shaping = handle.h_kern_subtable.is_some() &&
handle.table_for_tag(GPOS).is_none() &&
handle.table_for_tag(GSUB).is_none();
Ok(handle)
},
|
identifier_body
|
font.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Implementation of Quartz (CoreGraphics) fonts.
use app_units::Au;
use byteorder::{BigEndian, ByteOrder};
use core_foundation::base::CFIndex;
use core_foundation::data::CFData;
use core_foundation::string::UniChar;
use core_graphics::font::CGGlyph;
use core_graphics::geometry::CGRect;
use core_text::font::CTFont;
use core_text::font_descriptor::kCTFontDefaultOrientation;
use core_text::font_descriptor::{SymbolicTraitAccessors, TraitAccessors};
use crate::font::{
FontHandleMethods, FontMetrics, FontTableMethods, FontTableTag, FractionalPixel,
};
use crate::font::{GPOS, GSUB, KERN};
use crate::platform::font_template::FontTemplateData;
use crate::platform::macos::font_context::FontContextHandle;
use crate::text::glyph::GlyphId;
use servo_atoms::Atom;
use std::ops::Range;
use std::sync::Arc;
use std::{fmt, ptr};
use style::values::computed::font::{FontStretch, FontStyle, FontWeight};
const KERN_PAIR_LEN: usize = 6;
pub struct FontTable {
data: CFData,
}
// assumes 72 points per inch, and 96 px per inch
fn px_to_pt(px: f64) -> f64 {
px / 96. * 72.
}
// assumes 72 points per inch, and 96 px per inch
fn pt_to_px(pt: f64) -> f64 {
pt / 72. * 96.
}
fn au_from_pt(pt: f64) -> Au {
Au::from_f64_px(pt_to_px(pt))
}
impl FontTable {
pub fn wrap(data: CFData) -> FontTable {
FontTable { data: data }
}
}
impl FontTableMethods for FontTable {
fn buffer(&self) -> &[u8] {
self.data.bytes()
}
}
#[derive(Debug)]
pub struct FontHandle {
font_data: Arc<FontTemplateData>,
ctfont: CTFont,
h_kern_subtable: Option<CachedKernTable>,
can_do_fast_shaping: bool,
}
impl FontHandle {
/// Cache all the data needed for basic horizontal kerning. This is used only as a fallback or
/// fast path (when the GPOS table is missing or unnecessary) so it needn't handle every case.
fn find_h_kern_subtable(&self) -> Option<CachedKernTable> {
let font_table = self.table_for_tag(KERN)?;
let mut result = CachedKernTable {
font_table: font_table,
pair_data_range: 0..0,
px_per_font_unit: 0.0,
};
// Look for a subtable with horizontal kerning in format 0.
// https://www.microsoft.com/typography/otspec/kern.htm
const KERN_COVERAGE_HORIZONTAL_FORMAT_0: u16 = 1;
const SUBTABLE_HEADER_LEN: usize = 6;
const FORMAT_0_HEADER_LEN: usize = 8;
{
let table = result.font_table.buffer();
let version = BigEndian::read_u16(table);
            if version != 0 {
return None;
}
let num_subtables = BigEndian::read_u16(&table[2..]);
let mut start = 4;
for _ in 0..num_subtables {
// TODO: Check the subtable version number?
let len = BigEndian::read_u16(&table[start + 2..]) as usize;
let cov = BigEndian::read_u16(&table[start + 4..]);
let end = start + len;
if cov == KERN_COVERAGE_HORIZONTAL_FORMAT_0 {
// Found a matching subtable.
if result.pair_data_range.len() > 0 {
debug!("Found multiple horizontal kern tables. Disable fast path.");
return None;
}
// Read the subtable header.
let subtable_start = start + SUBTABLE_HEADER_LEN;
let n_pairs = BigEndian::read_u16(&table[subtable_start..]) as usize;
let pair_data_start = subtable_start + FORMAT_0_HEADER_LEN;
result.pair_data_range = pair_data_start..end;
                    if result.pair_data_range.len() != n_pairs * KERN_PAIR_LEN {
debug!("Bad data in kern header. Disable fast path.");
return None;
}
let pt_per_font_unit =
self.ctfont.pt_size() as f64 / self.ctfont.units_per_em() as f64;
result.px_per_font_unit = pt_to_px(pt_per_font_unit);
}
start = end;
}
}
if result.pair_data_range.len() > 0 {
Some(result)
} else {
None
}
}
}
struct CachedKernTable {
font_table: FontTable,
pair_data_range: Range<usize>,
px_per_font_unit: f64,
}
impl CachedKernTable {
/// Search for a glyph pair in the kern table and return the corresponding value.
fn binary_search(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> Option<i16> {
let pairs = &self.font_table.buffer()[self.pair_data_range.clone()];
let query = first_glyph << 16 | second_glyph;
let (mut start, mut end) = (0, pairs.len() / KERN_PAIR_LEN);
while start < end {
let i = (start + end) / 2;
let key = BigEndian::read_u32(&pairs[i * KERN_PAIR_LEN..]);
if key > query {
end = i;
} else if key < query {
start = i + 1;
} else {
return Some(BigEndian::read_i16(&pairs[i * KERN_PAIR_LEN + 4..]));
}
}
None
}
}
impl fmt::Debug for CachedKernTable {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "CachedKernTable")
}
}
impl FontHandleMethods for FontHandle {
fn new_from_template(
_fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<FontHandle, ()> {
let size = match pt_size {
Some(s) => s.to_f64_px(),
None => 0.0,
};
match template.ctfont(size) {
Some(ref ctfont) => {
let mut handle = FontHandle {
font_data: template.clone(),
ctfont: ctfont.clone_with_font_size(size),
h_kern_subtable: None,
can_do_fast_shaping: false,
};
handle.h_kern_subtable = handle.find_h_kern_subtable();
// TODO (#11310): Implement basic support for GPOS and GSUB.
handle.can_do_fast_shaping = handle.h_kern_subtable.is_some() &&
handle.table_for_tag(GPOS).is_none() &&
handle.table_for_tag(GSUB).is_none();
Ok(handle)
},
None => Err(()),
}
}
fn template(&self) -> Arc<FontTemplateData> {
self.font_data.clone()
}
fn family_name(&self) -> Option<String> {
Some(self.ctfont.family_name())
}
fn face_name(&self) -> Option<String> {
Some(self.ctfont.face_name())
}
fn style(&self) -> FontStyle {
use style::values::generics::font::FontStyle::*;
if self.ctfont.symbolic_traits().is_italic() {
Italic
} else {
Normal
}
}
fn boldness(&self) -> FontWeight {
let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
// TODO(emilio): It may make sense to make this range [.01, 10.0], to
// align with css-fonts-4's range of [1, 1000].
let normalized = if normalized <= 0.0 {
4.0 + normalized * 3.0 // [1.0, 4.0]
} else {
4.0 + normalized * 5.0 // [4.0, 9.0]
}; // [1.0, 9.0], centered on 4.0
FontWeight(normalized as f32 * 100.)
}
fn stretchiness(&self) -> FontStretch {
use style::values::computed::Percentage;
use style::values::generics::NonNegative;
let normalized = self.ctfont.all_traits().normalized_width(); // [-1.0, 1.0]
FontStretch(NonNegative(Percentage(normalized as f32 + 1.0)))
}
fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let characters: [UniChar; 1] = [codepoint as UniChar];
let mut glyphs: [CGGlyph; 1] = [0 as CGGlyph];
let count: CFIndex = 1;
let result = unsafe {
self.ctfont
.get_glyphs_for_characters(&characters[0], &mut glyphs[0], count)
};
        if !result {
// No glyph for this character
return None;
}
assert_ne!(glyphs[0], 0); // FIXME: error handling
return Some(glyphs[0] as GlyphId);
}
fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
if let Some(ref table) = self.h_kern_subtable {
if let Some(font_units) = table.binary_search(first_glyph, second_glyph) {
return font_units as f64 * table.px_per_font_unit;
}
}
0.0
}
fn can_do_fast_shaping(&self) -> bool {
self.can_do_fast_shaping
}
fn glyph_h_advance(&self, glyph: GlyphId) -> Option<FractionalPixel> {
let glyphs = [glyph as CGGlyph];
let advance = unsafe {
self.ctfont.get_advances_for_glyphs(
kCTFontDefaultOrientation,
&glyphs[0],
ptr::null_mut(),
1,
)
};
Some(advance as FractionalPixel)
}
fn metrics(&self) -> FontMetrics {
let bounding_rect: CGRect = self.ctfont.bounding_box();
let ascent = self.ctfont.ascent() as f64;
let descent = self.ctfont.descent() as f64;
let em_size = Au::from_f64_px(self.ctfont.pt_size() as f64);
let leading = self.ctfont.leading() as f64;
let scale = px_to_pt(self.ctfont.pt_size() as f64) / (ascent + descent);
let line_gap = (ascent + descent + leading + 0.5).floor();
let max_advance_width = au_from_pt(bounding_rect.size.width as f64);
let average_advance = self
.glyph_index('0')
.and_then(|idx| self.glyph_h_advance(idx))
.map(Au::from_f64_px)
.unwrap_or(max_advance_width);
let metrics = FontMetrics {
underline_size: au_from_pt(self.ctfont.underline_thickness() as f64),
// TODO(Issue #201): underline metrics are not reliable. Have to pull out of font table
// directly.
//
|
// see also: https://bugreports.qt-project.org/browse/QTBUG-13364
underline_offset: au_from_pt(self.ctfont.underline_position() as f64),
strikeout_size: Au(0), // FIXME(Issue #942)
strikeout_offset: Au(0), // FIXME(Issue #942)
leading: au_from_pt(leading),
x_height: au_from_pt((self.ctfont.x_height() as f64) * scale),
em_size: em_size,
ascent: au_from_pt(ascent * scale),
descent: au_from_pt(descent * scale),
max_advance: max_advance_width,
average_advance: average_advance,
line_gap: Au::from_f64_px(line_gap),
};
debug!(
"Font metrics (@{} pt): {:?}",
self.ctfont.pt_size() as f64,
metrics
);
metrics
}
fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result: Option<CFData> = self.ctfont.get_font_table(tag);
result.and_then(|data| Some(FontTable::wrap(data)))
}
fn identifier(&self) -> Atom {
self.font_data.identifier.clone()
}
}
|
// see also: https://bugs.webkit.org/show_bug.cgi?id=16768
|
random_line_split
|
font.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// Implementation of Quartz (CoreGraphics) fonts.
use app_units::Au;
use byteorder::{BigEndian, ByteOrder};
use core_foundation::base::CFIndex;
use core_foundation::data::CFData;
use core_foundation::string::UniChar;
use core_graphics::font::CGGlyph;
use core_graphics::geometry::CGRect;
use core_text::font::CTFont;
use core_text::font_descriptor::kCTFontDefaultOrientation;
use core_text::font_descriptor::{SymbolicTraitAccessors, TraitAccessors};
use crate::font::{
FontHandleMethods, FontMetrics, FontTableMethods, FontTableTag, FractionalPixel,
};
use crate::font::{GPOS, GSUB, KERN};
use crate::platform::font_template::FontTemplateData;
use crate::platform::macos::font_context::FontContextHandle;
use crate::text::glyph::GlyphId;
use servo_atoms::Atom;
use std::ops::Range;
use std::sync::Arc;
use std::{fmt, ptr};
use style::values::computed::font::{FontStretch, FontStyle, FontWeight};
const KERN_PAIR_LEN: usize = 6;
pub struct FontTable {
data: CFData,
}
// assumes 72 points per inch, and 96 px per inch
fn px_to_pt(px: f64) -> f64 {
px / 96. * 72.
}
// assumes 72 points per inch, and 96 px per inch
fn pt_to_px(pt: f64) -> f64 {
pt / 72. * 96.
}
fn
|
(pt: f64) -> Au {
Au::from_f64_px(pt_to_px(pt))
}
impl FontTable {
pub fn wrap(data: CFData) -> FontTable {
FontTable { data: data }
}
}
impl FontTableMethods for FontTable {
fn buffer(&self) -> &[u8] {
self.data.bytes()
}
}
#[derive(Debug)]
pub struct FontHandle {
font_data: Arc<FontTemplateData>,
ctfont: CTFont,
h_kern_subtable: Option<CachedKernTable>,
can_do_fast_shaping: bool,
}
impl FontHandle {
/// Cache all the data needed for basic horizontal kerning. This is used only as a fallback or
/// fast path (when the GPOS table is missing or unnecessary) so it needn't handle every case.
fn find_h_kern_subtable(&self) -> Option<CachedKernTable> {
let font_table = self.table_for_tag(KERN)?;
let mut result = CachedKernTable {
font_table: font_table,
pair_data_range: 0..0,
px_per_font_unit: 0.0,
};
// Look for a subtable with horizontal kerning in format 0.
// https://www.microsoft.com/typography/otspec/kern.htm
const KERN_COVERAGE_HORIZONTAL_FORMAT_0: u16 = 1;
const SUBTABLE_HEADER_LEN: usize = 6;
const FORMAT_0_HEADER_LEN: usize = 8;
{
let table = result.font_table.buffer();
let version = BigEndian::read_u16(table);
            if version != 0 {
return None;
}
let num_subtables = BigEndian::read_u16(&table[2..]);
let mut start = 4;
for _ in 0..num_subtables {
// TODO: Check the subtable version number?
let len = BigEndian::read_u16(&table[start + 2..]) as usize;
let cov = BigEndian::read_u16(&table[start + 4..]);
let end = start + len;
if cov == KERN_COVERAGE_HORIZONTAL_FORMAT_0 {
// Found a matching subtable.
if result.pair_data_range.len() > 0 {
debug!("Found multiple horizontal kern tables. Disable fast path.");
return None;
}
// Read the subtable header.
let subtable_start = start + SUBTABLE_HEADER_LEN;
let n_pairs = BigEndian::read_u16(&table[subtable_start..]) as usize;
let pair_data_start = subtable_start + FORMAT_0_HEADER_LEN;
result.pair_data_range = pair_data_start..end;
                    if result.pair_data_range.len() != n_pairs * KERN_PAIR_LEN {
debug!("Bad data in kern header. Disable fast path.");
return None;
}
let pt_per_font_unit =
self.ctfont.pt_size() as f64 / self.ctfont.units_per_em() as f64;
result.px_per_font_unit = pt_to_px(pt_per_font_unit);
}
start = end;
}
}
if result.pair_data_range.len() > 0 {
Some(result)
} else {
None
}
}
}
struct CachedKernTable {
font_table: FontTable,
pair_data_range: Range<usize>,
px_per_font_unit: f64,
}
impl CachedKernTable {
/// Search for a glyph pair in the kern table and return the corresponding value.
fn binary_search(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> Option<i16> {
let pairs = &self.font_table.buffer()[self.pair_data_range.clone()];
let query = first_glyph << 16 | second_glyph;
let (mut start, mut end) = (0, pairs.len() / KERN_PAIR_LEN);
while start < end {
let i = (start + end) / 2;
let key = BigEndian::read_u32(&pairs[i * KERN_PAIR_LEN..]);
if key > query {
end = i;
} else if key < query {
start = i + 1;
} else {
return Some(BigEndian::read_i16(&pairs[i * KERN_PAIR_LEN + 4..]));
}
}
None
}
}
impl fmt::Debug for CachedKernTable {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "CachedKernTable")
}
}
impl FontHandleMethods for FontHandle {
fn new_from_template(
_fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<FontHandle, ()> {
let size = match pt_size {
Some(s) => s.to_f64_px(),
None => 0.0,
};
match template.ctfont(size) {
Some(ref ctfont) => {
let mut handle = FontHandle {
font_data: template.clone(),
ctfont: ctfont.clone_with_font_size(size),
h_kern_subtable: None,
can_do_fast_shaping: false,
};
handle.h_kern_subtable = handle.find_h_kern_subtable();
// TODO (#11310): Implement basic support for GPOS and GSUB.
handle.can_do_fast_shaping = handle.h_kern_subtable.is_some() &&
handle.table_for_tag(GPOS).is_none() &&
handle.table_for_tag(GSUB).is_none();
Ok(handle)
},
None => Err(()),
}
}
fn template(&self) -> Arc<FontTemplateData> {
self.font_data.clone()
}
fn family_name(&self) -> Option<String> {
Some(self.ctfont.family_name())
}
fn face_name(&self) -> Option<String> {
Some(self.ctfont.face_name())
}
fn style(&self) -> FontStyle {
use style::values::generics::font::FontStyle::*;
if self.ctfont.symbolic_traits().is_italic() {
Italic
} else {
Normal
}
}
fn boldness(&self) -> FontWeight {
let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
// TODO(emilio): It may make sense to make this range [.01, 10.0], to
// align with css-fonts-4's range of [1, 1000].
let normalized = if normalized <= 0.0 {
4.0 + normalized * 3.0 // [1.0, 4.0]
} else {
4.0 + normalized * 5.0 // [4.0, 9.0]
}; // [1.0, 9.0], centered on 4.0
FontWeight(normalized as f32 * 100.)
}
fn stretchiness(&self) -> FontStretch {
use style::values::computed::Percentage;
use style::values::generics::NonNegative;
let normalized = self.ctfont.all_traits().normalized_width(); // [-1.0, 1.0]
FontStretch(NonNegative(Percentage(normalized as f32 + 1.0)))
}
fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let characters: [UniChar; 1] = [codepoint as UniChar];
let mut glyphs: [CGGlyph; 1] = [0 as CGGlyph];
let count: CFIndex = 1;
let result = unsafe {
self.ctfont
.get_glyphs_for_characters(&characters[0], &mut glyphs[0], count)
};
        if !result {
// No glyph for this character
return None;
}
assert_ne!(glyphs[0], 0); // FIXME: error handling
return Some(glyphs[0] as GlyphId);
}
fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel {
if let Some(ref table) = self.h_kern_subtable {
if let Some(font_units) = table.binary_search(first_glyph, second_glyph) {
return font_units as f64 * table.px_per_font_unit;
}
}
0.0
}
fn can_do_fast_shaping(&self) -> bool {
self.can_do_fast_shaping
}
fn glyph_h_advance(&self, glyph: GlyphId) -> Option<FractionalPixel> {
let glyphs = [glyph as CGGlyph];
let advance = unsafe {
self.ctfont.get_advances_for_glyphs(
kCTFontDefaultOrientation,
&glyphs[0],
ptr::null_mut(),
1,
)
};
Some(advance as FractionalPixel)
}
fn metrics(&self) -> FontMetrics {
let bounding_rect: CGRect = self.ctfont.bounding_box();
let ascent = self.ctfont.ascent() as f64;
let descent = self.ctfont.descent() as f64;
let em_size = Au::from_f64_px(self.ctfont.pt_size() as f64);
let leading = self.ctfont.leading() as f64;
let scale = px_to_pt(self.ctfont.pt_size() as f64) / (ascent + descent);
let line_gap = (ascent + descent + leading + 0.5).floor();
let max_advance_width = au_from_pt(bounding_rect.size.width as f64);
let average_advance = self
.glyph_index('0')
.and_then(|idx| self.glyph_h_advance(idx))
.map(Au::from_f64_px)
.unwrap_or(max_advance_width);
let metrics = FontMetrics {
underline_size: au_from_pt(self.ctfont.underline_thickness() as f64),
// TODO(Issue #201): underline metrics are not reliable. Have to pull out of font table
// directly.
//
// see also: https://bugs.webkit.org/show_bug.cgi?id=16768
// see also: https://bugreports.qt-project.org/browse/QTBUG-13364
underline_offset: au_from_pt(self.ctfont.underline_position() as f64),
strikeout_size: Au(0), // FIXME(Issue #942)
strikeout_offset: Au(0), // FIXME(Issue #942)
leading: au_from_pt(leading),
x_height: au_from_pt((self.ctfont.x_height() as f64) * scale),
em_size: em_size,
ascent: au_from_pt(ascent * scale),
descent: au_from_pt(descent * scale),
max_advance: max_advance_width,
average_advance: average_advance,
line_gap: Au::from_f64_px(line_gap),
};
debug!(
"Font metrics (@{} pt): {:?}",
self.ctfont.pt_size() as f64,
metrics
);
metrics
}
fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result: Option<CFData> = self.ctfont.get_font_table(tag);
result.and_then(|data| Some(FontTable::wrap(data)))
}
fn identifier(&self) -> Atom {
self.font_data.identifier.clone()
}
}
|
au_from_pt
|
identifier_name
|
main.rs
|
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate coreaudio;
extern crate coremidi;
extern crate time;
extern crate synthesizer_io_core;
use coreaudio::audio_unit::{AudioUnit, IOType, SampleFormat, Scope};
use coreaudio::audio_unit::render_callback::{self, data};
use synthesizer_io_core::modules;
use synthesizer_io_core::worker::Worker;
use synthesizer_io_core::queue::Sender;
use synthesizer_io_core::graph::{Node, Message, SetParam, Note};
use synthesizer_io_core::module::N_SAMPLES_PER_CHUNK;
struct Midi {
tx: Sender<Message>,
cur_note: Option<u8>,
}
impl Midi {
fn new(tx: Sender<Message>) -> Midi {
Midi {
tx: tx,
cur_note: None,
}
}
fn send(&self, msg: Message) {
self.tx.send(msg);
}
fn set_ctrl_const(&mut self, value: u8, lo: f32, hi: f32, ix: usize, ts: u64) {
let value = lo + value as f32 * (1.0/127.0) * (hi - lo);
let param = SetParam {
ix: ix,
param_ix: 0,
val: value,
timestamp: ts,
};
self.send(Message::SetParam(param));
}
fn send_note(&mut self, ixs: Vec<usize>, midi_num: f32, velocity: f32, on: bool,
ts: u64)
{
let note = Note {
ixs: ixs.into_boxed_slice(),
midi_num: midi_num,
velocity: velocity,
on: on,
timestamp: ts,
};
self.send(Message::Note(note));
}
fn dispatch_midi(&mut self, data: &[u8], ts: u64) {
let mut i = 0;
while i < data.len() {
if data[i] == 0xb0 {
let controller = data[i + 1];
let value = data[i + 2];
match controller {
1 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 3, ts),
2 => self.set_ctrl_const(value, 0.0, 0.995, 4, ts),
3 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 5, ts),
5 => self.set_ctrl_const(value, 0.0, 10.0, 11, ts),
6 => self.set_ctrl_const(value, 0.0, 10.0, 12, ts),
7 => self.set_ctrl_const(value, 0.0, 6.0, 13, ts),
8 => self.set_ctrl_const(value, 0.0, 10.0, 14, ts),
_ => println!("don't have handler for controller {}", controller),
}
i += 3;
} else if data[i] == 0x90 || data[i] == 0x80 {
let midi_num = data[i + 1];
let velocity = data[i + 2];
let on = data[i] == 0x90 && velocity > 0;
if on || self.cur_note == Some(midi_num) {
self.send_note(vec![5, 7], midi_num as f32, velocity as f32, on, ts);
|
break;
}
}
}
}
fn main() {
let (mut worker, tx, rx) = Worker::create(1024);
/*
let module = Box::new(modules::ConstCtrl::new(440.0f32.log2()));
worker.handle_node(Node::create(module, 1, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 2, [], [(1, 0)]));
let module = Box::new(modules::ConstCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 4, [], [(3, 0)]));
let module = Box::new(modules::Sum);
worker.handle_node(Node::create(module, 0, [(2, 0), (4, 0)], []));
*/
let module = Box::new(modules::Saw::new(44_100.0));
worker.handle_node(Node::create(module, 1, [], [(5, 0)]));
let module = Box::new(modules::SmoothCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::SmoothCtrl::new(0.5));
worker.handle_node(Node::create(module, 4, [], []));
let module = Box::new(modules::NotePitch::new());
worker.handle_node(Node::create(module, 5, [], []));
let module = Box::new(modules::Biquad::new(44_100.0));
worker.handle_node(Node::create(module, 6, [(1,0)], [(3, 0), (4, 0)]));
let module = Box::new(modules::Adsr::new());
worker.handle_node(Node::create(module, 7, [], vec![(11, 0), (12, 0), (13, 0), (14, 0)]));
let module = Box::new(modules::Gain::new());
worker.handle_node(Node::create(module, 0, [(6, 0)], [(7, 0)]));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 11, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 12, [], []));
let module = Box::new(modules::SmoothCtrl::new(4.0));
worker.handle_node(Node::create(module, 13, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 14, [], []));
let _audio_unit = run(worker).unwrap();
let source_index = 0;
if let Some(source) = coremidi::Source::from_index(source_index) {
println!("Listening for midi from {}", source.display_name().unwrap());
let client = coremidi::Client::new("synthesizer-client").unwrap();
let mut last_ts = 0;
let mut last_val = 0;
let mut midi = Midi::new(tx);
let callback = move |packet_list: &coremidi::PacketList| {
for packet in packet_list.iter() {
let data = packet.data();
let delta_t = packet.timestamp() - last_ts;
let speed = 1e9 * (data[2] as f64 - last_val as f64) / delta_t as f64;
println!("{} {:3.3} {} {}", speed, delta_t as f64 * 1e-6, data[2],
time::precise_time_ns() - packet.timestamp());
last_val = data[2];
last_ts = packet.timestamp();
midi.dispatch_midi(&data, last_ts);
}
};
let input_port = client.input_port("synthesizer-port", callback).unwrap();
input_port.connect_source(&source).unwrap();
println!("Press Enter to exit.");
let mut line = String::new();
::std::io::stdin().read_line(&mut line).unwrap();
input_port.disconnect_source(&source).unwrap();
} else {
println!("No midi available");
}
}
fn run(mut worker: Worker) -> Result<AudioUnit, coreaudio::Error> {
// Construct an Output audio unit that delivers audio to the default output device.
let mut audio_unit = AudioUnit::new(IOType::DefaultOutput)?;
let stream_format = audio_unit.stream_format(Scope::Output)?;
//println!("{:#?}", &stream_format);
// We expect `f32` data.
assert!(SampleFormat::F32 == stream_format.sample_format);
type Args = render_callback::Args<data::NonInterleaved<f32>>;
audio_unit.set_render_callback(move |args| {
        let Args { num_frames, mut data, .. }: Args = args;
assert!(num_frames % N_SAMPLES_PER_CHUNK == 0);
let mut i = 0;
let mut timestamp = time::precise_time_ns();
while i < num_frames {
// should let the graph generate stereo
let buf = worker.work(timestamp)[0].get();
for j in 0..N_SAMPLES_PER_CHUNK {
for channel in data.channels_mut() {
channel[i + j] = buf[j];
}
}
timestamp += 1451247; // 64 * 1e9 / 44_100
i += N_SAMPLES_PER_CHUNK;
}
Ok(())
})?;
audio_unit.start()?;
Ok(audio_unit)
}
|
self.cur_note = if on { Some(midi_num) } else { None }
}
i += 3;
} else {
|
random_line_split
|
main.rs
|
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate coreaudio;
extern crate coremidi;
extern crate time;
extern crate synthesizer_io_core;
use coreaudio::audio_unit::{AudioUnit, IOType, SampleFormat, Scope};
use coreaudio::audio_unit::render_callback::{self, data};
use synthesizer_io_core::modules;
use synthesizer_io_core::worker::Worker;
use synthesizer_io_core::queue::Sender;
use synthesizer_io_core::graph::{Node, Message, SetParam, Note};
use synthesizer_io_core::module::N_SAMPLES_PER_CHUNK;
struct Midi {
tx: Sender<Message>,
cur_note: Option<u8>,
}
impl Midi {
fn new(tx: Sender<Message>) -> Midi {
Midi {
tx: tx,
cur_note: None,
}
}
fn
|
(&self, msg: Message) {
self.tx.send(msg);
}
fn set_ctrl_const(&mut self, value: u8, lo: f32, hi: f32, ix: usize, ts: u64) {
let value = lo + value as f32 * (1.0/127.0) * (hi - lo);
let param = SetParam {
ix: ix,
param_ix: 0,
val: value,
timestamp: ts,
};
self.send(Message::SetParam(param));
}
fn send_note(&mut self, ixs: Vec<usize>, midi_num: f32, velocity: f32, on: bool,
ts: u64)
{
let note = Note {
ixs: ixs.into_boxed_slice(),
midi_num: midi_num,
velocity: velocity,
on: on,
timestamp: ts,
};
self.send(Message::Note(note));
}
fn dispatch_midi(&mut self, data: &[u8], ts: u64) {
let mut i = 0;
while i < data.len() {
if data[i] == 0xb0 {
let controller = data[i + 1];
let value = data[i + 2];
match controller {
1 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 3, ts),
2 => self.set_ctrl_const(value, 0.0, 0.995, 4, ts),
3 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 5, ts),
5 => self.set_ctrl_const(value, 0.0, 10.0, 11, ts),
6 => self.set_ctrl_const(value, 0.0, 10.0, 12, ts),
7 => self.set_ctrl_const(value, 0.0, 6.0, 13, ts),
8 => self.set_ctrl_const(value, 0.0, 10.0, 14, ts),
_ => println!("don't have handler for controller {}", controller),
}
i += 3;
} else if data[i] == 0x90 || data[i] == 0x80 {
let midi_num = data[i + 1];
let velocity = data[i + 2];
let on = data[i] == 0x90 && velocity > 0;
if on || self.cur_note == Some(midi_num) {
self.send_note(vec![5, 7], midi_num as f32, velocity as f32, on, ts);
self.cur_note = if on { Some(midi_num) } else { None }
}
i += 3;
} else {
break;
}
}
}
}
fn main() {
let (mut worker, tx, rx) = Worker::create(1024);
/*
let module = Box::new(modules::ConstCtrl::new(440.0f32.log2()));
worker.handle_node(Node::create(module, 1, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 2, [], [(1, 0)]));
let module = Box::new(modules::ConstCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 4, [], [(3, 0)]));
let module = Box::new(modules::Sum);
worker.handle_node(Node::create(module, 0, [(2, 0), (4, 0)], []));
*/
let module = Box::new(modules::Saw::new(44_100.0));
worker.handle_node(Node::create(module, 1, [], [(5, 0)]));
let module = Box::new(modules::SmoothCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::SmoothCtrl::new(0.5));
worker.handle_node(Node::create(module, 4, [], []));
let module = Box::new(modules::NotePitch::new());
worker.handle_node(Node::create(module, 5, [], []));
let module = Box::new(modules::Biquad::new(44_100.0));
worker.handle_node(Node::create(module, 6, [(1,0)], [(3, 0), (4, 0)]));
let module = Box::new(modules::Adsr::new());
worker.handle_node(Node::create(module, 7, [], vec![(11, 0), (12, 0), (13, 0), (14, 0)]));
let module = Box::new(modules::Gain::new());
worker.handle_node(Node::create(module, 0, [(6, 0)], [(7, 0)]));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 11, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 12, [], []));
let module = Box::new(modules::SmoothCtrl::new(4.0));
worker.handle_node(Node::create(module, 13, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 14, [], []));
let _audio_unit = run(worker).unwrap();
let source_index = 0;
if let Some(source) = coremidi::Source::from_index(source_index) {
println!("Listening for midi from {}", source.display_name().unwrap());
let client = coremidi::Client::new("synthesizer-client").unwrap();
let mut last_ts = 0;
let mut last_val = 0;
let mut midi = Midi::new(tx);
let callback = move |packet_list: &coremidi::PacketList| {
for packet in packet_list.iter() {
let data = packet.data();
let delta_t = packet.timestamp() - last_ts;
let speed = 1e9 * (data[2] as f64 - last_val as f64) / delta_t as f64;
println!("{} {:3.3} {} {}", speed, delta_t as f64 * 1e-6, data[2],
time::precise_time_ns() - packet.timestamp());
last_val = data[2];
last_ts = packet.timestamp();
midi.dispatch_midi(&data, last_ts);
}
};
let input_port = client.input_port("synthesizer-port", callback).unwrap();
input_port.connect_source(&source).unwrap();
println!("Press Enter to exit.");
let mut line = String::new();
::std::io::stdin().read_line(&mut line).unwrap();
input_port.disconnect_source(&source).unwrap();
} else {
println!("No midi available");
}
}
fn run(mut worker: Worker) -> Result<AudioUnit, coreaudio::Error> {
// Construct an Output audio unit that delivers audio to the default output device.
let mut audio_unit = AudioUnit::new(IOType::DefaultOutput)?;
let stream_format = audio_unit.stream_format(Scope::Output)?;
//println!("{:#?}", &stream_format);
// We expect `f32` data.
assert!(SampleFormat::F32 == stream_format.sample_format);
type Args = render_callback::Args<data::NonInterleaved<f32>>;
audio_unit.set_render_callback(move |args| {
        let Args { num_frames, mut data, .. }: Args = args;
assert!(num_frames % N_SAMPLES_PER_CHUNK == 0);
let mut i = 0;
let mut timestamp = time::precise_time_ns();
while i < num_frames {
// should let the graph generate stereo
let buf = worker.work(timestamp)[0].get();
for j in 0..N_SAMPLES_PER_CHUNK {
for channel in data.channels_mut() {
channel[i + j] = buf[j];
}
}
timestamp += 1451247; // 64 * 1e9 / 44_100
i += N_SAMPLES_PER_CHUNK;
}
Ok(())
})?;
audio_unit.start()?;
Ok(audio_unit)
}
|
send
|
identifier_name
|
main.rs
|
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate coreaudio;
extern crate coremidi;
extern crate time;
extern crate synthesizer_io_core;
use coreaudio::audio_unit::{AudioUnit, IOType, SampleFormat, Scope};
use coreaudio::audio_unit::render_callback::{self, data};
use synthesizer_io_core::modules;
use synthesizer_io_core::worker::Worker;
use synthesizer_io_core::queue::Sender;
use synthesizer_io_core::graph::{Node, Message, SetParam, Note};
use synthesizer_io_core::module::N_SAMPLES_PER_CHUNK;
struct Midi {
tx: Sender<Message>,
cur_note: Option<u8>,
}
impl Midi {
fn new(tx: Sender<Message>) -> Midi {
Midi {
tx: tx,
cur_note: None,
}
}
fn send(&self, msg: Message) {
self.tx.send(msg);
}
fn set_ctrl_const(&mut self, value: u8, lo: f32, hi: f32, ix: usize, ts: u64) {
let value = lo + value as f32 * (1.0/127.0) * (hi - lo);
let param = SetParam {
ix: ix,
param_ix: 0,
val: value,
timestamp: ts,
};
self.send(Message::SetParam(param));
}
fn send_note(&mut self, ixs: Vec<usize>, midi_num: f32, velocity: f32, on: bool,
ts: u64)
{
let note = Note {
ixs: ixs.into_boxed_slice(),
midi_num: midi_num,
velocity: velocity,
on: on,
timestamp: ts,
};
self.send(Message::Note(note));
}
fn dispatch_midi(&mut self, data: &[u8], ts: u64) {
let mut i = 0;
while i < data.len() {
if data[i] == 0xb0
|
else if data[i] == 0x90 || data[i] == 0x80 {
let midi_num = data[i + 1];
let velocity = data[i + 2];
let on = data[i] == 0x90 && velocity > 0;
if on || self.cur_note == Some(midi_num) {
self.send_note(vec![5, 7], midi_num as f32, velocity as f32, on, ts);
self.cur_note = if on { Some(midi_num) } else { None }
}
i += 3;
} else {
break;
}
}
}
}
fn main() {
let (mut worker, tx, rx) = Worker::create(1024);
/*
let module = Box::new(modules::ConstCtrl::new(440.0f32.log2()));
worker.handle_node(Node::create(module, 1, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 2, [], [(1, 0)]));
let module = Box::new(modules::ConstCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 4, [], [(3, 0)]));
let module = Box::new(modules::Sum);
worker.handle_node(Node::create(module, 0, [(2, 0), (4, 0)], []));
*/
let module = Box::new(modules::Saw::new(44_100.0));
worker.handle_node(Node::create(module, 1, [], [(5, 0)]));
let module = Box::new(modules::SmoothCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::SmoothCtrl::new(0.5));
worker.handle_node(Node::create(module, 4, [], []));
let module = Box::new(modules::NotePitch::new());
worker.handle_node(Node::create(module, 5, [], []));
let module = Box::new(modules::Biquad::new(44_100.0));
worker.handle_node(Node::create(module, 6, [(1,0)], [(3, 0), (4, 0)]));
let module = Box::new(modules::Adsr::new());
worker.handle_node(Node::create(module, 7, [], vec![(11, 0), (12, 0), (13, 0), (14, 0)]));
let module = Box::new(modules::Gain::new());
worker.handle_node(Node::create(module, 0, [(6, 0)], [(7, 0)]));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 11, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 12, [], []));
let module = Box::new(modules::SmoothCtrl::new(4.0));
worker.handle_node(Node::create(module, 13, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 14, [], []));
let _audio_unit = run(worker).unwrap();
let source_index = 0;
if let Some(source) = coremidi::Source::from_index(source_index) {
println!("Listening for midi from {}", source.display_name().unwrap());
let client = coremidi::Client::new("synthesizer-client").unwrap();
let mut last_ts = 0;
let mut last_val = 0;
let mut midi = Midi::new(tx);
let callback = move |packet_list: &coremidi::PacketList| {
for packet in packet_list.iter() {
let data = packet.data();
let delta_t = packet.timestamp() - last_ts;
let speed = 1e9 * (data[2] as f64 - last_val as f64) / delta_t as f64;
println!("{} {:3.3} {} {}", speed, delta_t as f64 * 1e-6, data[2],
time::precise_time_ns() - packet.timestamp());
last_val = data[2];
last_ts = packet.timestamp();
midi.dispatch_midi(&data, last_ts);
}
};
let input_port = client.input_port("synthesizer-port", callback).unwrap();
input_port.connect_source(&source).unwrap();
println!("Press Enter to exit.");
let mut line = String::new();
::std::io::stdin().read_line(&mut line).unwrap();
input_port.disconnect_source(&source).unwrap();
} else {
println!("No midi available");
}
}
fn run(mut worker: Worker) -> Result<AudioUnit, coreaudio::Error> {
// Construct an Output audio unit that delivers audio to the default output device.
let mut audio_unit = AudioUnit::new(IOType::DefaultOutput)?;
let stream_format = audio_unit.stream_format(Scope::Output)?;
//println!("{:#?}", &stream_format);
// We expect `f32` data.
assert!(SampleFormat::F32 == stream_format.sample_format);
type Args = render_callback::Args<data::NonInterleaved<f32>>;
audio_unit.set_render_callback(move |args| {
let Args { num_frames, mut data,.. }: Args = args;
assert!(num_frames % N_SAMPLES_PER_CHUNK == 0);
let mut i = 0;
let mut timestamp = time::precise_time_ns();
while i < num_frames {
// should let the graph generate stereo
let buf = worker.work(timestamp)[0].get();
for j in 0..N_SAMPLES_PER_CHUNK {
for channel in data.channels_mut() {
channel[i + j] = buf[j];
}
}
timestamp += 1451247; // 64 * 1e9 / 44_100
i += N_SAMPLES_PER_CHUNK;
}
Ok(())
})?;
audio_unit.start()?;
Ok(audio_unit)
}
|
{
let controller = data[i + 1];
let value = data[i + 2];
match controller {
1 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 3, ts),
2 => self.set_ctrl_const(value, 0.0, 0.995, 4, ts),
3 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 5, ts),
5 => self.set_ctrl_const(value, 0.0, 10.0, 11, ts),
6 => self.set_ctrl_const(value, 0.0, 10.0, 12, ts),
7 => self.set_ctrl_const(value, 0.0, 6.0, 13, ts),
8 => self.set_ctrl_const(value, 0.0, 10.0, 14, ts),
_ => println!("don't have handler for controller {}", controller),
}
i += 3;
}
|
conditional_block
|
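A note on the controller handling in `set_ctrl_const` above: a 7-bit MIDI value (0..=127) is mapped linearly onto the `[lo, hi]` parameter range. A minimal standalone sketch of the same arithmetic (the helper name is hypothetical, not part of the crate):

// Hypothetical helper mirroring the MIDI controller -> parameter mapping used above.
fn scale_midi(value: u8, lo: f32, hi: f32) -> f32 {
    // 0 maps to `lo`, 127 maps to `hi`, intermediate values interpolate linearly.
    lo + value as f32 * (1.0 / 127.0) * (hi - lo)
}

fn main() {
    assert!((scale_midi(0, 0.0, 10.0) - 0.0).abs() < 1e-4);
    assert!((scale_midi(127, 0.0, 10.0) - 10.0).abs() < 1e-4);
}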
movedex.rs
|
extern crate csv;
use super::enums;
use super::moves::Technique;
use enum_primitive::FromPrimitive;
use std::collections::HashMap;
/// Manages the list of moves that are available. Contains a bool that is true whenever all
/// available moves are inside the entries to make an easier search possible.
/// Currently the whole movedex contains 617 moves, which are nearly all moves from the main game
/// series. 4 moves are missing due to missing data in the underlying database.
#[derive(Debug, Clone)]
pub struct Movedex {
entries: Vec<Technique>,
complete: bool,
}
// TODO: last 4 attacks are missing in move_meta.csv, therefore are not implemented right now.
// DB must be extended and if statements adjusted accordingly
impl Movedex {
    /// Takes an ID and a movedex and returns an option with the move that can be found with the
/// given ID. Returns None if the ID isn't in the movedex.
pub fn move_by_id(&self, id: usize) -> Option<Technique> {
if id < 617 && self.is_complete() {
return Some(self.get_entries()[id - 1].clone());
} else if id < 617 {
for entry in self.entries.clone() {
if entry.get_id() == id {
return Some(entry);
}
}
}
None
}
/// Returns a list of all learnable moves by level for a specific pokemon with a specific
/// level.
pub fn for_token(&self, level: u16, id: usize) -> Movedex
|
move_tmp.clone().unwrap().get_name() == "fling" ||
move_tmp.clone().unwrap().get_name() == "trump-card" ||
move_tmp.clone().unwrap().get_name() == "me-first" ||
move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique) ||
(move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique &&
(move_tmp.clone().unwrap().get_name() == "teleport" ||
move_tmp.clone().unwrap().get_name() == "mimic" ||
move_tmp.clone().unwrap().get_name() == "metronome" ||
move_tmp.clone().unwrap().get_name() == "mirror-move" ||
move_tmp.clone().unwrap().get_name() == "nature-power" ||
move_tmp.clone().unwrap().get_name() == "splash" ||
//move_tmp.clone().unwrap().get_name() == "rest" ||
move_tmp.clone().unwrap().get_name() == "conversion" ||
move_tmp.clone().unwrap().get_name() == "spite" ||
move_tmp.clone().unwrap().get_name() == "sleep-talk" ||
move_tmp.clone().unwrap().get_name() == "celebrate" ||
move_tmp.clone().unwrap().get_name() == "powder" ||
move_tmp.clone().unwrap().get_name() == "reflect-type" ||
move_tmp.clone().unwrap().get_name() == "soak")) {
new_dex.push(self.move_by_id(move_id).unwrap());
}
}
}
new_dex.sort();
new_dex.dedup();
Movedex {
entries: new_dex,
complete: false,
}
}
/// Returns the entry field of a movedex.
pub fn get_entries(&self) -> Vec<Technique> {
self.entries.clone()
}
/// Returns true if the movedex contains all possible moves, and false if not.
fn is_complete(&self) -> bool {
self.complete
}
/// Creates a complete Movedex from the type_efficacy, moves_whole and move_flag_map databases
/// in the table folder.
pub fn new() -> Movedex {
// In the first step creates a vec with the effectivities for every type.
let mut effectivity = Vec::new();
let mut effective_db = csv::Reader::from_file("./src/db/tables/type_efficacy.csv").unwrap();
for record in effective_db.decode() {
let (off, def, factor): (i32, i32, u8) = record.unwrap();
effectivity.push((off, def, factor));
}
        // Creates the main part with most simple values and directly adds a Hash Map for the type
// efficiency of the move.
let mut moves = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/moves_whole.csv").unwrap();
for record in move_db.decode() {
let mut move_tmp: Technique = record.unwrap();
let mut effective_hash = HashMap::new();
for entry in effectivity.clone() {
                if entry.0 == move_tmp.get_type() as i32 && entry.2 != 100 {
let eff_id = match entry.2 {
0 => -4,
50 => -1,
200 => 1,
_ => unreachable!(),
};
effective_hash.insert(enums::Types::from_i32(entry.1).unwrap(), eff_id);
move_tmp.set_effectivity_map(effective_hash.clone());
}
}
moves.push(move_tmp);
}
        // Adds all flags that are valid for the moves.
let mut flags = Vec::new();
let mut last_id = 1;
let mut flag_db = csv::Reader::from_file("./src/db/tables/move_flag_map.csv").unwrap();
for record in flag_db.decode() {
let (id, identifier): (usize, i32) = record.unwrap();
if id < 617 {
                if !(id == last_id) {
moves[last_id - 1].set_flags(flags);
last_id = id;
flags = Vec::new();
}
flags.push(enums::MoveFlags::from_i32(identifier).unwrap());
}
}
Movedex {
entries: moves,
complete: true,
}
}
}
|
{
let mut new_dex = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/pokemon_moves.csv").unwrap();
for record in move_db.decode() {
let (poke_id, version, move_id, _, move_level, _): (usize,
u8,
usize,
usize,
u16,
Option<usize>) = record.unwrap();
if move_id < 617 && move_level <= level && poke_id == id && version == 16 {
let move_tmp = self.move_by_id(move_id);
// ifs are needed to exclude unimplemented moves from the list
if move_tmp.clone().is_some() &&
!(move_tmp.clone().unwrap().get_name() == "counter" ||
move_tmp.clone().unwrap().get_name() == "bide" ||
move_tmp.clone().unwrap().get_name() == "mirror-coat" ||
move_tmp.clone().unwrap().get_name() == "spit-up" ||
move_tmp.clone().unwrap().get_name() == "natural-gift" ||
move_tmp.clone().unwrap().get_name() == "metal-burst" ||
|
identifier_body
|
movedex.rs
|
extern crate csv;
use super::enums;
use super::moves::Technique;
use enum_primitive::FromPrimitive;
use std::collections::HashMap;
/// Manages the list of moves that are available. Contains a bool that is true whenever all
/// available moves are inside the entries to make an easier search possible.
/// Currently the whole movedex contains 617 moves, which are nearly all moves from the main game
/// series. 4 moves are missing due to missing data in the underlying database.
#[derive(Debug, Clone)]
pub struct Movedex {
entries: Vec<Technique>,
complete: bool,
}
// TODO: last 4 attacks are missing in move_meta.csv, therefore are not implemented right now.
// DB must be extended and if statements adjusted accordingly
impl Movedex {
    /// Takes an ID and a movedex and returns an option with the move that can be found with the
/// given ID. Returns None if the ID isn't in the movedex.
pub fn move_by_id(&self, id: usize) -> Option<Technique> {
if id < 617 && self.is_complete() {
return Some(self.get_entries()[id - 1].clone());
} else if id < 617 {
for entry in self.entries.clone() {
if entry.get_id() == id {
return Some(entry);
}
}
}
None
}
/// Returns a list of all learnable moves by level for a specific pokemon with a specific
/// level.
pub fn for_token(&self, level: u16, id: usize) -> Movedex {
let mut new_dex = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/pokemon_moves.csv").unwrap();
for record in move_db.decode() {
let (poke_id, version, move_id, _, move_level, _): (usize,
u8,
usize,
usize,
u16,
Option<usize>) = record.unwrap();
if move_id < 617 && move_level <= level && poke_id == id && version == 16 {
let move_tmp = self.move_by_id(move_id);
// ifs are needed to exclude unimplemented moves from the list
if move_tmp.clone().is_some() &&
!(move_tmp.clone().unwrap().get_name() == "counter" ||
move_tmp.clone().unwrap().get_name() == "bide" ||
move_tmp.clone().unwrap().get_name() == "mirror-coat" ||
move_tmp.clone().unwrap().get_name() == "spit-up" ||
move_tmp.clone().unwrap().get_name() == "natural-gift" ||
move_tmp.clone().unwrap().get_name() == "metal-burst" ||
move_tmp.clone().unwrap().get_name() == "fling" ||
move_tmp.clone().unwrap().get_name() == "trump-card" ||
move_tmp.clone().unwrap().get_name() == "me-first" ||
move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique) ||
(move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique &&
(move_tmp.clone().unwrap().get_name() == "teleport" ||
move_tmp.clone().unwrap().get_name() == "mimic" ||
move_tmp.clone().unwrap().get_name() == "metronome" ||
move_tmp.clone().unwrap().get_name() == "mirror-move" ||
move_tmp.clone().unwrap().get_name() == "nature-power" ||
move_tmp.clone().unwrap().get_name() == "splash" ||
//move_tmp.clone().unwrap().get_name() == "rest" ||
move_tmp.clone().unwrap().get_name() == "conversion" ||
move_tmp.clone().unwrap().get_name() == "spite" ||
move_tmp.clone().unwrap().get_name() == "sleep-talk" ||
move_tmp.clone().unwrap().get_name() == "celebrate" ||
move_tmp.clone().unwrap().get_name() == "powder" ||
move_tmp.clone().unwrap().get_name() == "reflect-type" ||
move_tmp.clone().unwrap().get_name() == "soak")) {
new_dex.push(self.move_by_id(move_id).unwrap());
}
}
}
new_dex.sort();
new_dex.dedup();
Movedex {
entries: new_dex,
complete: false,
}
}
/// Returns the entry field of a movedex.
pub fn get_entries(&self) -> Vec<Technique> {
self.entries.clone()
}
/// Returns true if the movedex contains all possible moves, and false if not.
fn is_complete(&self) -> bool {
self.complete
}
/// Creates a complete Movedex from the type_efficacy, moves_whole and move_flag_map databases
/// in the table folder.
pub fn new() -> Movedex {
// In the first step creates a vec with the effectivities for every type.
let mut effectivity = Vec::new();
let mut effective_db = csv::Reader::from_file("./src/db/tables/type_efficacy.csv").unwrap();
for record in effective_db.decode() {
let (off, def, factor): (i32, i32, u8) = record.unwrap();
effectivity.push((off, def, factor));
}
        // Creates the main part with most simple values and directly adds a Hash Map for the type
// efficiency of the move.
let mut moves = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/moves_whole.csv").unwrap();
for record in move_db.decode() {
let mut move_tmp: Technique = record.unwrap();
let mut effective_hash = HashMap::new();
for entry in effectivity.clone() {
                if entry.0 == move_tmp.get_type() as i32 && entry.2 != 100 {
let eff_id = match entry.2 {
0 => -4,
50 => -1,
200 => 1,
_ => unreachable!(),
};
effective_hash.insert(enums::Types::from_i32(entry.1).unwrap(), eff_id);
move_tmp.set_effectivity_map(effective_hash.clone());
}
}
moves.push(move_tmp);
}
        // Adds all flags that are valid for the moves.
let mut flags = Vec::new();
let mut last_id = 1;
let mut flag_db = csv::Reader::from_file("./src/db/tables/move_flag_map.csv").unwrap();
for record in flag_db.decode() {
let (id, identifier): (usize, i32) = record.unwrap();
if id < 617
|
}
Movedex {
entries: moves,
complete: true,
}
}
}
|
{
if !(id == last_id) {
moves[last_id - 1].set_flags(flags);
last_id = id;
flags = Vec::new();
}
flags.push(enums::MoveFlags::from_i32(identifier).unwrap());
}
|
conditional_block
|
movedex.rs
|
extern crate csv;
use super::enums;
use super::moves::Technique;
use enum_primitive::FromPrimitive;
use std::collections::HashMap;
/// Manages the list of moves that are available. Contains a bool that is true whenever all
/// available moves are inside the entries to make an easier search possible.
/// Currently the whole movedex contains 617 moves, which are nearly all moves from the main game
/// series. 4 moves are missing due to missing data in the underlying database.
#[derive(Debug, Clone)]
pub struct
|
{
entries: Vec<Technique>,
complete: bool,
}
// TODO: last 4 attacks are missing in move_meta.csv, therefore are not implemented right now.
// DB must be extended and if statements adjusted accordingly
impl Movedex {
    /// Takes an ID and a movedex and returns an option with the move that can be found with the
/// given ID. Returns None if the ID isn't in the movedex.
pub fn move_by_id(&self, id: usize) -> Option<Technique> {
if id < 617 && self.is_complete() {
return Some(self.get_entries()[id - 1].clone());
} else if id < 617 {
for entry in self.entries.clone() {
if entry.get_id() == id {
return Some(entry);
}
}
}
None
}
/// Returns a list of all learnable moves by level for a specific pokemon with a specific
/// level.
pub fn for_token(&self, level: u16, id: usize) -> Movedex {
let mut new_dex = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/pokemon_moves.csv").unwrap();
for record in move_db.decode() {
let (poke_id, version, move_id, _, move_level, _): (usize,
u8,
usize,
usize,
u16,
Option<usize>) = record.unwrap();
if move_id < 617 && move_level <= level && poke_id == id && version == 16 {
let move_tmp = self.move_by_id(move_id);
// ifs are needed to exclude unimplemented moves from the list
if move_tmp.clone().is_some() &&
!(move_tmp.clone().unwrap().get_name() == "counter" ||
move_tmp.clone().unwrap().get_name() == "bide" ||
move_tmp.clone().unwrap().get_name() == "mirror-coat" ||
move_tmp.clone().unwrap().get_name() == "spit-up" ||
move_tmp.clone().unwrap().get_name() == "natural-gift" ||
move_tmp.clone().unwrap().get_name() == "metal-burst" ||
move_tmp.clone().unwrap().get_name() == "fling" ||
move_tmp.clone().unwrap().get_name() == "trump-card" ||
move_tmp.clone().unwrap().get_name() == "me-first" ||
move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique) ||
(move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique &&
(move_tmp.clone().unwrap().get_name() == "teleport" ||
move_tmp.clone().unwrap().get_name() == "mimic" ||
move_tmp.clone().unwrap().get_name() == "metronome" ||
move_tmp.clone().unwrap().get_name() == "mirror-move" ||
move_tmp.clone().unwrap().get_name() == "nature-power" ||
move_tmp.clone().unwrap().get_name() == "splash" ||
//move_tmp.clone().unwrap().get_name() == "rest" ||
move_tmp.clone().unwrap().get_name() == "conversion" ||
move_tmp.clone().unwrap().get_name() == "spite" ||
move_tmp.clone().unwrap().get_name() == "sleep-talk" ||
move_tmp.clone().unwrap().get_name() == "celebrate" ||
move_tmp.clone().unwrap().get_name() == "powder" ||
move_tmp.clone().unwrap().get_name() == "reflect-type" ||
move_tmp.clone().unwrap().get_name() == "soak")) {
new_dex.push(self.move_by_id(move_id).unwrap());
}
}
}
new_dex.sort();
new_dex.dedup();
Movedex {
entries: new_dex,
complete: false,
}
}
/// Returns the entry field of a movedex.
pub fn get_entries(&self) -> Vec<Technique> {
self.entries.clone()
}
/// Returns true if the movedex contains all possible moves, and false if not.
fn is_complete(&self) -> bool {
self.complete
}
/// Creates a complete Movedex from the type_efficacy, moves_whole and move_flag_map databases
/// in the table folder.
pub fn new() -> Movedex {
// In the first step creates a vec with the effectivities for every type.
let mut effectivity = Vec::new();
let mut effective_db = csv::Reader::from_file("./src/db/tables/type_efficacy.csv").unwrap();
for record in effective_db.decode() {
let (off, def, factor): (i32, i32, u8) = record.unwrap();
effectivity.push((off, def, factor));
}
        // Creates the main part with most simple values and directly adds a Hash Map for the type
// efficiency of the move.
let mut moves = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/moves_whole.csv").unwrap();
for record in move_db.decode() {
let mut move_tmp: Technique = record.unwrap();
let mut effective_hash = HashMap::new();
for entry in effectivity.clone() {
                if entry.0 == move_tmp.get_type() as i32 && entry.2 != 100 {
let eff_id = match entry.2 {
0 => -4,
50 => -1,
200 => 1,
_ => unreachable!(),
};
effective_hash.insert(enums::Types::from_i32(entry.1).unwrap(), eff_id);
move_tmp.set_effectivity_map(effective_hash.clone());
}
}
moves.push(move_tmp);
}
        // Adds all flags that are valid for the moves.
let mut flags = Vec::new();
let mut last_id = 1;
let mut flag_db = csv::Reader::from_file("./src/db/tables/move_flag_map.csv").unwrap();
for record in flag_db.decode() {
let (id, identifier): (usize, i32) = record.unwrap();
if id < 617 {
                if !(id == last_id) {
moves[last_id - 1].set_flags(flags);
last_id = id;
flags = Vec::new();
}
flags.push(enums::MoveFlags::from_i32(identifier).unwrap());
}
}
Movedex {
entries: moves,
complete: true,
}
}
}
|
Movedex
|
identifier_name
|
movedex.rs
|
extern crate csv;
use super::enums;
use super::moves::Technique;
use enum_primitive::FromPrimitive;
use std::collections::HashMap;
/// Manages the list of moves that are available. Contains a bool that is true whenever all
/// available moves are inside the entries to make an easier search possible.
/// Currently the whole movedex contains 617 moves, which are nearly all moves from the main game
/// series. 4 moves are missing due to missing data in the underlying database.
#[derive(Debug, Clone)]
pub struct Movedex {
entries: Vec<Technique>,
complete: bool,
}
// TODO: last 4 attacks are missing in move_meta.csv, therefore are not implemented right now.
// DB must be extended and if statements adjusted accordingly
impl Movedex {
    /// Takes an ID and a movedex and returns an option with the move that can be found with the
/// given ID. Returns None if the ID isn't in the movedex.
pub fn move_by_id(&self, id: usize) -> Option<Technique> {
if id < 617 && self.is_complete() {
return Some(self.get_entries()[id - 1].clone());
} else if id < 617 {
for entry in self.entries.clone() {
if entry.get_id() == id {
return Some(entry);
}
}
}
|
}
/// Returns a list of all learnable moves by level for a specific pokemon with a specific
/// level.
pub fn for_token(&self, level: u16, id: usize) -> Movedex {
let mut new_dex = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/pokemon_moves.csv").unwrap();
for record in move_db.decode() {
let (poke_id, version, move_id, _, move_level, _): (usize,
u8,
usize,
usize,
u16,
Option<usize>) = record.unwrap();
if move_id < 617 && move_level <= level && poke_id == id && version == 16 {
let move_tmp = self.move_by_id(move_id);
// ifs are needed to exclude unimplemented moves from the list
if move_tmp.clone().is_some() &&
!(move_tmp.clone().unwrap().get_name() == "counter" ||
move_tmp.clone().unwrap().get_name() == "bide" ||
move_tmp.clone().unwrap().get_name() == "mirror-coat" ||
move_tmp.clone().unwrap().get_name() == "spit-up" ||
move_tmp.clone().unwrap().get_name() == "natural-gift" ||
move_tmp.clone().unwrap().get_name() == "metal-burst" ||
move_tmp.clone().unwrap().get_name() == "fling" ||
move_tmp.clone().unwrap().get_name() == "trump-card" ||
move_tmp.clone().unwrap().get_name() == "me-first" ||
move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique) ||
(move_tmp.clone().unwrap().get_category() == enums::MoveCategory::Unique &&
(move_tmp.clone().unwrap().get_name() == "teleport" ||
move_tmp.clone().unwrap().get_name() == "mimic" ||
move_tmp.clone().unwrap().get_name() == "metronome" ||
move_tmp.clone().unwrap().get_name() == "mirror-move" ||
move_tmp.clone().unwrap().get_name() == "nature-power" ||
move_tmp.clone().unwrap().get_name() == "splash" ||
//move_tmp.clone().unwrap().get_name() == "rest" ||
move_tmp.clone().unwrap().get_name() == "conversion" ||
move_tmp.clone().unwrap().get_name() == "spite" ||
move_tmp.clone().unwrap().get_name() == "sleep-talk" ||
move_tmp.clone().unwrap().get_name() == "celebrate" ||
move_tmp.clone().unwrap().get_name() == "powder" ||
move_tmp.clone().unwrap().get_name() == "reflect-type" ||
move_tmp.clone().unwrap().get_name() == "soak")) {
new_dex.push(self.move_by_id(move_id).unwrap());
}
}
}
new_dex.sort();
new_dex.dedup();
Movedex {
entries: new_dex,
complete: false,
}
}
/// Returns the entry field of a movedex.
pub fn get_entries(&self) -> Vec<Technique> {
self.entries.clone()
}
/// Returns true if the movedex contains all possible moves, and false if not.
fn is_complete(&self) -> bool {
self.complete
}
/// Creates a complete Movedex from the type_efficacy, moves_whole and move_flag_map databases
/// in the table folder.
pub fn new() -> Movedex {
// In the first step creates a vec with the effectivities for every type.
let mut effectivity = Vec::new();
let mut effective_db = csv::Reader::from_file("./src/db/tables/type_efficacy.csv").unwrap();
for record in effective_db.decode() {
let (off, def, factor): (i32, i32, u8) = record.unwrap();
effectivity.push((off, def, factor));
}
        // Creates the main part with most simple values and directly adds a Hash Map for the type
// efficiency of the move.
let mut moves = Vec::new();
let mut move_db = csv::Reader::from_file("./src/db/tables/moves_whole.csv").unwrap();
for record in move_db.decode() {
let mut move_tmp: Technique = record.unwrap();
let mut effective_hash = HashMap::new();
for entry in effectivity.clone() {
                if entry.0 == move_tmp.get_type() as i32 && entry.2 != 100 {
let eff_id = match entry.2 {
0 => -4,
50 => -1,
200 => 1,
_ => unreachable!(),
};
effective_hash.insert(enums::Types::from_i32(entry.1).unwrap(), eff_id);
move_tmp.set_effectivity_map(effective_hash.clone());
}
}
moves.push(move_tmp);
}
        // Adds all flags that are valid for the moves.
let mut flags = Vec::new();
let mut last_id = 1;
let mut flag_db = csv::Reader::from_file("./src/db/tables/move_flag_map.csv").unwrap();
for record in flag_db.decode() {
let (id, identifier): (usize, i32) = record.unwrap();
if id < 617 {
                if !(id == last_id) {
moves[last_id - 1].set_flags(flags);
last_id = id;
flags = Vec::new();
}
flags.push(enums::MoveFlags::from_i32(identifier).unwrap());
}
}
Movedex {
entries: moves,
complete: true,
}
}
}
|
None
|
random_line_split
|
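For orientation, a minimal usage sketch of the Movedex API shown above (the level and pokemon id are made-up values; `get_id` and `get_name` are the Technique accessors already used inside `for_token`):

// Hypothetical usage of the Movedex defined above.
fn list_learnable_moves() {
    let dex = Movedex::new();              // builds the full dex from the CSV tables
    let learnable = dex.for_token(36, 25); // moves a level-36 pokemon with id 25 may learn
    for technique in learnable.get_entries() {
        println!("{}: {}", technique.get_id(), technique.get_name());
    }
}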
new_task.rs
|
use lapin::{options::*, types::FieldTable, BasicProperties, Connection, ConnectionProperties};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let args: Vec<_> = std::env::args().skip(1).collect();
let message = match args.len() {
0 => b"hello".to_vec(),
_ => args.join(" ").into_bytes(),
};
let addr = "amqp://127.0.0.1:5672";
let conn = Connection::connect(addr, ConnectionProperties::default()).await?;
let channel = conn.create_channel().await?;
channel
.queue_declare(
"task_queue",
QueueDeclareOptions::default(),
FieldTable::default(),
)
.await?;
channel
.basic_publish(
"",
"task_queue",
BasicPublishOptions::default(),
message.clone(),
BasicProperties::default(),
)
.await?;
println!(" [x] Sent {:?}", std::str::from_utf8(&message)?);
conn.close(0, "").await?;
Ok(())
|
}
|
random_line_split
|
|
new_task.rs
|
use lapin::{options::*, types::FieldTable, BasicProperties, Connection, ConnectionProperties};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>>
|
.basic_publish(
"",
"task_queue",
BasicPublishOptions::default(),
message.clone(),
BasicProperties::default(),
)
.await?;
println!(" [x] Sent {:?}", std::str::from_utf8(&message)?);
conn.close(0, "").await?;
Ok(())
}
|
{
let args: Vec<_> = std::env::args().skip(1).collect();
let message = match args.len() {
0 => b"hello".to_vec(),
_ => args.join(" ").into_bytes(),
};
let addr = "amqp://127.0.0.1:5672";
let conn = Connection::connect(addr, ConnectionProperties::default()).await?;
let channel = conn.create_channel().await?;
channel
.queue_declare(
"task_queue",
QueueDeclareOptions::default(),
FieldTable::default(),
)
.await?;
channel
|
identifier_body
|
new_task.rs
|
use lapin::{options::*, types::FieldTable, BasicProperties, Connection, ConnectionProperties};
#[tokio::main]
async fn
|
() -> Result<(), Box<dyn std::error::Error>> {
let args: Vec<_> = std::env::args().skip(1).collect();
let message = match args.len() {
0 => b"hello".to_vec(),
_ => args.join(" ").into_bytes(),
};
let addr = "amqp://127.0.0.1:5672";
let conn = Connection::connect(addr, ConnectionProperties::default()).await?;
let channel = conn.create_channel().await?;
channel
.queue_declare(
"task_queue",
QueueDeclareOptions::default(),
FieldTable::default(),
)
.await?;
channel
.basic_publish(
"",
"task_queue",
BasicPublishOptions::default(),
message.clone(),
BasicProperties::default(),
)
.await?;
println!(" [x] Sent {:?}", std::str::from_utf8(&message)?);
conn.close(0, "").await?;
Ok(())
}
|
main
|
identifier_name
|
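The argument handling in the publisher above defaults the payload to "hello" when the program is run with no arguments; the same pattern in isolation (helper name hypothetical):

// Joins CLI arguments into one space-separated payload, falling back to "hello".
fn payload_from_args(args: &[String]) -> Vec<u8> {
    if args.is_empty() {
        b"hello".to_vec()
    } else {
        args.join(" ").into_bytes()
    }
}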
lib.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A bare-metal library supplying functions rustc may lower code to
//!
//! This library is not intended for general use, and is superseded by a system
//! libc if one is available. In a freestanding context, however, common
//! functions such as memset, memcpy, etc are not implemented. This library
//! provides an implementation of these functions which are either required by
//! libcore or called by rustc implicitly.
//!
//! This library is never included by default, and must be manually included if
//! necessary. It is an error to include this library when also linking with
//! the system libc library.
#![crate_id = "rlibc#0.11.0-pre"]
#![license = "MIT/ASL2"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://static.rust-lang.org/doc/master")]
#![no_std]
#![experimental]
// This library is defining the builtin functions, so it would be a shame for
// LLVM to optimize these function calls to themselves!
#![no_builtins]
#[cfg(test)] extern crate std;
#[cfg(test)] extern crate native;
// Require the offset intrinsics for LLVM to properly optimize the
// implementations below. If pointer arithmetic is done through integers the
// optimizations start to break down.
extern "rust-intrinsic" {
fn offset<T>(dst: *T, offset: int) -> *T;
}
#[no_mangle]
pub unsafe extern "C" fn memcpy(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
|
#[no_mangle]
pub unsafe extern "C" fn memmove(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
if src < dest as *u8 { // copy from end
let mut i = n;
        while i != 0 {
i -= 1;
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
}
} else { // copy from beginning
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memset(s: *mut u8, c: i32, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(s as *u8, i as int) as *mut u8) = c as u8;
i += 1;
}
return s;
}
#[no_mangle]
pub unsafe extern "C" fn memcmp(s1: *u8, s2: *u8, n: uint) -> i32 {
let mut i = 0;
while i < n {
let a = *offset(s1, i as int);
let b = *offset(s2, i as int);
        if a != b {
return (a - b) as i32
}
i += 1;
}
return 0;
}
#[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows
|
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
return dest;
}
|
random_line_split
|
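The direction check in the memmove implementation above is what makes overlapping copies safe: when the destination starts above the source the copy runs backwards, otherwise forwards. A safe-Rust sketch of the same idea on a single slice (illustration only, not the C ABI function):

// Copies `n` bytes within one buffer from `src` to `dst`, picking the copy
// direction so that overlapping ranges behave like memmove above.
fn move_within(buf: &mut [u8], src: usize, dst: usize, n: usize) {
    if dst > src {
        for i in (0..n).rev() {
            buf[dst + i] = buf[src + i];
        }
    } else {
        for i in 0..n {
            buf[dst + i] = buf[src + i];
        }
    }
}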
lib.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A bare-metal library supplying functions rustc may lower code to
//!
//! This library is not intended for general use, and is superseded by a system
//! libc if one is available. In a freestanding context, however, common
//! functions such as memset, memcpy, etc are not implemented. This library
//! provides an implementation of these functions which are either required by
//! libcore or called by rustc implicitly.
//!
//! This library is never included by default, and must be manually included if
//! necessary. It is an error to include this library when also linking with
//! the system libc library.
#![crate_id = "rlibc#0.11.0-pre"]
#![license = "MIT/ASL2"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://static.rust-lang.org/doc/master")]
#![no_std]
#![experimental]
// This library is definining the builtin functions, so it would be a shame for
// LLVM to optimize these function calls to themselves!
#![no_builtins]
#[cfg(test)] extern crate std;
#[cfg(test)] extern crate native;
// Require the offset intrinsics for LLVM to properly optimize the
// implementations below. If pointer arithmetic is done through integers the
// optimizations start to break down.
extern "rust-intrinsic" {
fn offset<T>(dst: *T, offset: int) -> *T;
}
#[no_mangle]
pub unsafe extern "C" fn memcpy(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memmove(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
if src < dest as *u8 { // copy from end
let mut i = n;
        while i != 0 {
i -= 1;
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
}
} else
|
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memset(s: *mut u8, c: i32, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(s as *u8, i as int) as *mut u8) = c as u8;
i += 1;
}
return s;
}
#[no_mangle]
pub unsafe extern "C" fn memcmp(s1: *u8, s2: *u8, n: uint) -> i32 {
let mut i = 0;
while i < n {
let a = *offset(s1, i as int);
let b = *offset(s2, i as int);
        if a != b {
return (a - b) as i32
}
i += 1;
}
return 0;
}
#[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows
|
{ // copy from beginning
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
}
|
conditional_block
|
lib.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A bare-metal library supplying functions rustc may lower code to
//!
//! This library is not intended for general use, and is superseded by a system
//! libc if one is available. In a freestanding context, however, common
//! functions such as memset, memcpy, etc are not implemented. This library
//! provides an implementation of these functions which are either required by
//! libcore or called by rustc implicitly.
//!
//! This library is never included by default, and must be manually included if
//! necessary. It is an error to include this library when also linking with
//! the system libc library.
#![crate_id = "rlibc#0.11.0-pre"]
#![license = "MIT/ASL2"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://static.rust-lang.org/doc/master")]
#![no_std]
#![experimental]
// This library is definining the builtin functions, so it would be a shame for
// LLVM to optimize these function calls to themselves!
#![no_builtins]
#[cfg(test)] extern crate std;
#[cfg(test)] extern crate native;
// Require the offset intrinsics for LLVM to properly optimize the
// implementations below. If pointer arithmetic is done through integers the
// optimizations start to break down.
extern "rust-intrinsic" {
fn offset<T>(dst: *T, offset: int) -> *T;
}
#[no_mangle]
pub unsafe extern "C" fn memcpy(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memmove(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
if src < dest as *u8 { // copy from end
let mut i = n;
        while i != 0 {
i -= 1;
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
}
} else { // copy from beginning
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memset(s: *mut u8, c: i32, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(s as *u8, i as int) as *mut u8) = c as u8;
i += 1;
}
return s;
}
#[no_mangle]
pub unsafe extern "C" fn memcmp(s1: *u8, s2: *u8, n: uint) -> i32
|
#[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows
|
{
let mut i = 0;
while i < n {
let a = *offset(s1, i as int);
let b = *offset(s2, i as int);
if a != b {
return (a - b) as i32
}
i += 1;
}
return 0;
}
|
identifier_body
|
lib.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A bare-metal library supplying functions rustc may lower code to
//!
//! This library is not intended for general use, and is superseded by a system
//! libc if one is available. In a freestanding context, however, common
//! functions such as memset, memcpy, etc are not implemented. This library
//! provides an implementation of these functions which are either required by
//! libcore or called by rustc implicitly.
//!
//! This library is never included by default, and must be manually included if
//! necessary. It is an error to include this library when also linking with
//! the system libc library.
#![crate_id = "rlibc#0.11.0-pre"]
#![license = "MIT/ASL2"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://static.rust-lang.org/doc/master")]
#![no_std]
#![experimental]
// This library is definining the builtin functions, so it would be a shame for
// LLVM to optimize these function calls to themselves!
#![no_builtins]
#[cfg(test)] extern crate std;
#[cfg(test)] extern crate native;
// Require the offset intrinsics for LLVM to properly optimize the
// implementations below. If pointer arithmetic is done through integers the
// optimizations start to break down.
extern "rust-intrinsic" {
fn offset<T>(dst: *T, offset: int) -> *T;
}
#[no_mangle]
pub unsafe extern "C" fn memcpy(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn
|
(dest: *mut u8, src: *u8, n: uint) -> *mut u8 {
if src < dest as *u8 { // copy from end
let mut i = n;
        while i != 0 {
i -= 1;
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
}
} else { // copy from beginning
let mut i = 0;
while i < n {
*(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int);
i += 1;
}
}
return dest;
}
#[no_mangle]
pub unsafe extern "C" fn memset(s: *mut u8, c: i32, n: uint) -> *mut u8 {
let mut i = 0;
while i < n {
*(offset(s as *u8, i as int) as *mut u8) = c as u8;
i += 1;
}
return s;
}
#[no_mangle]
pub unsafe extern "C" fn memcmp(s1: *u8, s2: *u8, n: uint) -> i32 {
let mut i = 0;
while i < n {
let a = *offset(s1, i as int);
let b = *offset(s2, i as int);
        if a != b {
return (a - b) as i32
}
i += 1;
}
return 0;
}
#[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows
|
memmove
|
identifier_name
|
clock.rs
|
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::{Duration, Instant};
pub struct Clock {
init: isize,
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
}
fn spawn_updater_thread(
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
main_signal: Arc<Condvar>,
) {
thread::spawn(move || loop {
        if !*running.lock().unwrap() {
thread::sleep(Duration::from_millis(10));
continue;
}
if *time_left.lock().unwrap() <= 0 {
main_signal.notify_all();
}
|
});
}
impl Clock {
// starts stopped
pub fn new(init: isize, main_signal: Arc<Condvar>) -> Clock {
let time_left = Arc::new(Mutex::new(init));
let running = Arc::new(Mutex::new(false));
spawn_updater_thread(time_left.clone(), running.clone(), main_signal);
Clock {
init: init,
time_left: time_left,
running: running,
}
}
pub fn reset(&self) {
self.stop();
*self.time_left.lock().unwrap() = self.init;
}
pub fn set(&mut self, init: isize) {
self.init = init;
*self.time_left.lock().unwrap() = init;
}
pub fn correct(&mut self, to: isize) {
*self.time_left.lock().unwrap() = to;
}
pub fn is_zero(&self) -> bool {
*self.time_left.lock().unwrap() <= 0
}
pub fn stop(&self) {
*self.running.lock().unwrap() = false;
}
pub fn start(&self) {
*self.running.lock().unwrap() = true;
}
pub fn time_remaining(&self) -> isize {
*self.time_left.lock().unwrap()
}
}
impl fmt::Display for Clock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let csecs = self.time_remaining();
let secs = csecs / 100;
if secs <= 0 {
let min = -secs / 60;
write!(f, "-{:01}:{:02}", min, (-secs) - min * 60)
} else {
let min = secs / 60;
write!(f, "{:01}:{:02}", min, secs - min * 60)
}
}
}
|
let sleep_start = Instant::now();
thread::sleep(Duration::from_millis(10));
let slept_for = Instant::now().duration_since(sleep_start);
*time_left.lock().unwrap() -= slept_for.subsec_nanos() as isize / 10000000;
|
random_line_split
|
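A minimal usage sketch of the Clock above. The initial value is in hundredths of a second (the updater thread subtracts `subsec_nanos / 10000000` per tick), and the Condvar passed to `new` is notified once the clock reaches zero; waiting on it is left to the caller:

use std::sync::{Arc, Condvar};

fn clock_example() {
    let signal = Arc::new(Condvar::new());
    let clock = Clock::new(5 * 60 * 100, signal.clone()); // five minutes, in centiseconds
    clock.start();
    // ... game runs ...
    println!("time left: {}", clock); // Display impl prints m:ss
    clock.stop();
    clock.reset();
}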
clock.rs
|
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::{Duration, Instant};
pub struct Clock {
init: isize,
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
}
fn spawn_updater_thread(
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
main_signal: Arc<Condvar>,
) {
thread::spawn(move || loop {
        if !*running.lock().unwrap()
|
if *time_left.lock().unwrap() <= 0 {
main_signal.notify_all();
}
let sleep_start = Instant::now();
thread::sleep(Duration::from_millis(10));
let slept_for = Instant::now().duration_since(sleep_start);
*time_left.lock().unwrap() -= slept_for.subsec_nanos() as isize / 10000000;
});
}
impl Clock {
// starts stopped
pub fn new(init: isize, main_signal: Arc<Condvar>) -> Clock {
let time_left = Arc::new(Mutex::new(init));
let running = Arc::new(Mutex::new(false));
spawn_updater_thread(time_left.clone(), running.clone(), main_signal);
Clock {
init: init,
time_left: time_left,
running: running,
}
}
pub fn reset(&self) {
self.stop();
*self.time_left.lock().unwrap() = self.init;
}
pub fn set(&mut self, init: isize) {
self.init = init;
*self.time_left.lock().unwrap() = init;
}
pub fn correct(&mut self, to: isize) {
*self.time_left.lock().unwrap() = to;
}
pub fn is_zero(&self) -> bool {
*self.time_left.lock().unwrap() <= 0
}
pub fn stop(&self) {
*self.running.lock().unwrap() = false;
}
pub fn start(&self) {
*self.running.lock().unwrap() = true;
}
pub fn time_remaining(&self) -> isize {
*self.time_left.lock().unwrap()
}
}
impl fmt::Display for Clock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let csecs = self.time_remaining();
let secs = csecs / 100;
if secs <= 0 {
let min = -secs / 60;
write!(f, "-{:01}:{:02}", min, (-secs) - min * 60)
} else {
let min = secs / 60;
write!(f, "{:01}:{:02}", min, secs - min * 60)
}
}
}
|
{
thread::sleep(Duration::from_millis(10));
continue;
}
|
conditional_block
|
clock.rs
|
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::{Duration, Instant};
pub struct Clock {
init: isize,
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
}
fn spawn_updater_thread(
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
main_signal: Arc<Condvar>,
) {
thread::spawn(move || loop {
        if !*running.lock().unwrap() {
thread::sleep(Duration::from_millis(10));
continue;
}
if *time_left.lock().unwrap() <= 0 {
main_signal.notify_all();
}
let sleep_start = Instant::now();
thread::sleep(Duration::from_millis(10));
let slept_for = Instant::now().duration_since(sleep_start);
*time_left.lock().unwrap() -= slept_for.subsec_nanos() as isize / 10000000;
});
}
impl Clock {
// starts stopped
pub fn new(init: isize, main_signal: Arc<Condvar>) -> Clock {
let time_left = Arc::new(Mutex::new(init));
let running = Arc::new(Mutex::new(false));
spawn_updater_thread(time_left.clone(), running.clone(), main_signal);
Clock {
init: init,
time_left: time_left,
running: running,
}
}
pub fn reset(&self) {
self.stop();
*self.time_left.lock().unwrap() = self.init;
}
pub fn set(&mut self, init: isize) {
self.init = init;
*self.time_left.lock().unwrap() = init;
}
pub fn correct(&mut self, to: isize) {
*self.time_left.lock().unwrap() = to;
}
pub fn is_zero(&self) -> bool {
*self.time_left.lock().unwrap() <= 0
}
pub fn stop(&self) {
*self.running.lock().unwrap() = false;
}
pub fn start(&self)
|
pub fn time_remaining(&self) -> isize {
*self.time_left.lock().unwrap()
}
}
impl fmt::Display for Clock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let csecs = self.time_remaining();
let secs = csecs / 100;
if secs <= 0 {
let min = -secs / 60;
write!(f, "-{:01}:{:02}", min, (-secs) - min * 60)
} else {
let min = secs / 60;
write!(f, "{:01}:{:02}", min, secs - min * 60)
}
}
}
|
{
*self.running.lock().unwrap() = true;
}
|
identifier_body
|
clock.rs
|
use std::fmt;
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::{Duration, Instant};
pub struct Clock {
init: isize,
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
}
fn spawn_updater_thread(
time_left: Arc<Mutex<isize>>,
running: Arc<Mutex<bool>>,
main_signal: Arc<Condvar>,
) {
thread::spawn(move || loop {
        if !*running.lock().unwrap() {
thread::sleep(Duration::from_millis(10));
continue;
}
if *time_left.lock().unwrap() <= 0 {
main_signal.notify_all();
}
let sleep_start = Instant::now();
thread::sleep(Duration::from_millis(10));
let slept_for = Instant::now().duration_since(sleep_start);
*time_left.lock().unwrap() -= slept_for.subsec_nanos() as isize / 10000000;
});
}
impl Clock {
// starts stopped
pub fn new(init: isize, main_signal: Arc<Condvar>) -> Clock {
let time_left = Arc::new(Mutex::new(init));
let running = Arc::new(Mutex::new(false));
spawn_updater_thread(time_left.clone(), running.clone(), main_signal);
Clock {
init: init,
time_left: time_left,
running: running,
}
}
pub fn reset(&self) {
self.stop();
*self.time_left.lock().unwrap() = self.init;
}
pub fn set(&mut self, init: isize) {
self.init = init;
*self.time_left.lock().unwrap() = init;
}
pub fn correct(&mut self, to: isize) {
*self.time_left.lock().unwrap() = to;
}
pub fn is_zero(&self) -> bool {
*self.time_left.lock().unwrap() <= 0
}
pub fn stop(&self) {
*self.running.lock().unwrap() = false;
}
pub fn start(&self) {
*self.running.lock().unwrap() = true;
}
pub fn time_remaining(&self) -> isize {
*self.time_left.lock().unwrap()
}
}
impl fmt::Display for Clock {
fn
|
(&self, f: &mut fmt::Formatter) -> fmt::Result {
let csecs = self.time_remaining();
let secs = csecs / 100;
if secs <= 0 {
let min = -secs / 60;
write!(f, "-{:01}:{:02}", min, (-secs) - min * 60)
} else {
let min = secs / 60;
write!(f, "{:01}:{:02}", min, secs - min * 60)
}
}
}
|
fmt
|
identifier_name
|
chario.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic char output trait.
use core::str::{Str, StrSlice};
use core::slice::{Slice, ImmutableSlice};
use core::collections::Collection;
use core::iter::range;
use core::mem::zeroed;
use lib::strconv;
/// CharIO provides an interface for outputting characters.
///
/// This trait implements the common functions to output strings and numbers,
/// requiring only one method: `putc`.
pub trait CharIO {
/// Outputs a character.
fn putc(&self, value: char);
/// Outputs a string.
fn puts(&self, s: &str) {
let chars : &[u8] = s.as_slice().as_bytes();
for i in range(0, s.len()) {
let c : char = chars[i] as char;
self.putc(c);
}
}
/// Outputs an integer with given base.
fn putint(&self, i: u32, base: u32) {
        let mut buf : [u8, ..32] = unsafe { zeroed() };
let bsl : &mut [u8] = buf;
strconv::itoa(i, bsl, base);
for &i in bsl.iter() {
if i == 0 {
break;
}
self.putc(i as char);
}
}
/// Outputs an integer.
fn puti(&self, i: u32) {
self.putint(i, 10);
}
/// Outputs an integer as a hex string.
fn puth(&self, i: u32) {
self.putint(i, 16);
}
}
#[cfg(test)]
pub mod test {
use core::cell::RefCell;
use drivers::chario::CharIO;
pub struct TestCharIOData {
last_char: char,
putc_calls: uint,
}
pub struct TestCharIO {
data: RefCell<TestCharIOData>
}
impl CharIO for TestCharIO {
fn putc(&self, value: char) {
let mut data = self.data.borrow_mut();
data.putc_calls += 1;
data.last_char = value;
}
}
impl TestCharIO {
pub fn
|
() -> TestCharIO {
TestCharIO {
data: RefCell::new(TestCharIOData {
last_char: '\0',
putc_calls: 0,
}),
}
}
fn get_last_char(&self) -> char {
self.data.borrow().last_char
}
fn get_and_reset_putc_calls(&self) -> uint {
let current = self.data.borrow().putc_calls;
self.data.borrow_mut().putc_calls = 0;
current
}
}
#[test]
fn putc_should_store_a_char() {
let io = TestCharIO::new();
io.putc('a');
assert!(io.get_last_char() == 'a');
io.putc('z');
assert!(io.get_last_char() == 'z');
}
#[test]
fn puti_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puti(3);
assert!(io.get_last_char() == '3');
io.puti(9);
assert!(io.get_last_char() == '9');
io.puti(10);
assert!(io.get_last_char() == '0');
io.puti(11);
assert!(io.get_last_char() == '1');
}
#[test]
fn puth_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puth(3);
assert!(io.get_last_char() == '3');
io.puth(9);
assert!(io.get_last_char() == '9');
io.puth(10);
assert!(io.get_last_char() == 'a');
io.puth(11);
assert!(io.get_last_char() == 'b');
io.puth(16);
assert!(io.get_last_char() == '0');
io.puth(17);
assert!(io.get_last_char() == '1');
}
#[test]
fn putint_should_work_with_different_bases() {
let io = TestCharIO::new();
io.putint(0, 2);
assert!(io.get_last_char() == '0');
io.putint(1, 2);
assert!(io.get_last_char() == '1');
io.putint(2, 2);
assert!(io.get_last_char() == '0');
io.putint(3, 2);
assert!(io.get_last_char() == '1');
io.putint(7, 7);
assert!(io.get_last_char() == '0');
io.putint(8, 7);
assert!(io.get_last_char() == '1');
io.putint(12, 7);
assert!(io.get_last_char() == '5');
io.putint(14, 7);
assert!(io.get_last_char() == '0');
}
#[test]
fn puts_should_leave_us_with_just_the_last_char() {
let io = TestCharIO::new();
io.puts("fu!");
assert!(io.get_last_char() == '!');
assert!(io.get_and_reset_putc_calls() == 3);
io.puts("\n\t");
assert!(io.get_last_char() == '\t');
assert!(io.get_and_reset_putc_calls() == 2);
}
}
|
new
|
identifier_name
|
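Because the trait above only requires `putc`, any sink gets `puts`, `puti`, and `puth` for free from the default methods. A hypothetical no-op implementation, written against the same pre-1.0 Rust dialect as the file above:

// Hypothetical sink that discards every character; puts/puti/puth still work
// through the trait's default methods defined above.
pub struct NullCharIO;

impl CharIO for NullCharIO {
    fn putc(&self, _value: char) { }
}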
chario.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic char output trait.
use core::str::{Str, StrSlice};
use core::slice::{Slice, ImmutableSlice};
use core::collections::Collection;
use core::iter::range;
use core::mem::zeroed;
use lib::strconv;
/// CharIO provides an interface for outputting characters.
///
/// This trait implements the common functions to output strings and numbers,
/// requiring only one method: `putc`.
pub trait CharIO {
/// Outputs a character.
fn putc(&self, value: char);
/// Outputs a string.
fn puts(&self, s: &str) {
let chars : &[u8] = s.as_slice().as_bytes();
for i in range(0, s.len()) {
let c : char = chars[i] as char;
self.putc(c);
}
}
/// Outputs an integer with given base.
fn putint(&self, i: u32, base: u32) {
        let mut buf : [u8, ..32] = unsafe { zeroed() };
let bsl : &mut [u8] = buf;
strconv::itoa(i, bsl, base);
for &i in bsl.iter() {
if i == 0
|
self.putc(i as char);
}
}
/// Outputs an integer.
fn puti(&self, i: u32) {
self.putint(i, 10);
}
/// Outputs an integer as a hex string.
fn puth(&self, i: u32) {
self.putint(i, 16);
}
}
#[cfg(test)]
pub mod test {
use core::cell::RefCell;
use drivers::chario::CharIO;
pub struct TestCharIOData {
last_char: char,
putc_calls: uint,
}
pub struct TestCharIO {
data: RefCell<TestCharIOData>
}
impl CharIO for TestCharIO {
fn putc(&self, value: char) {
let mut data = self.data.borrow_mut();
data.putc_calls += 1;
data.last_char = value;
}
}
impl TestCharIO {
pub fn new() -> TestCharIO {
TestCharIO {
data: RefCell::new(TestCharIOData {
last_char: '\0',
putc_calls: 0,
}),
}
}
fn get_last_char(&self) -> char {
self.data.borrow().last_char
}
fn get_and_reset_putc_calls(&self) -> uint {
let current = self.data.borrow().putc_calls;
self.data.borrow_mut().putc_calls = 0;
current
}
}
#[test]
fn putc_should_store_a_char() {
let io = TestCharIO::new();
io.putc('a');
assert!(io.get_last_char() == 'a');
io.putc('z');
assert!(io.get_last_char() == 'z');
}
#[test]
fn puti_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puti(3);
assert!(io.get_last_char() == '3');
io.puti(9);
assert!(io.get_last_char() == '9');
io.puti(10);
assert!(io.get_last_char() == '0');
io.puti(11);
assert!(io.get_last_char() == '1');
}
#[test]
fn puth_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puth(3);
assert!(io.get_last_char() == '3');
io.puth(9);
assert!(io.get_last_char() == '9');
io.puth(10);
assert!(io.get_last_char() == 'a');
io.puth(11);
assert!(io.get_last_char() == 'b');
io.puth(16);
assert!(io.get_last_char() == '0');
io.puth(17);
assert!(io.get_last_char() == '1');
}
#[test]
fn putint_should_work_with_different_bases() {
let io = TestCharIO::new();
io.putint(0, 2);
assert!(io.get_last_char() == '0');
io.putint(1, 2);
assert!(io.get_last_char() == '1');
io.putint(2, 2);
assert!(io.get_last_char() == '0');
io.putint(3, 2);
assert!(io.get_last_char() == '1');
io.putint(7, 7);
assert!(io.get_last_char() == '0');
io.putint(8, 7);
assert!(io.get_last_char() == '1');
io.putint(12, 7);
assert!(io.get_last_char() == '5');
io.putint(14, 7);
assert!(io.get_last_char() == '0');
}
#[test]
fn puts_should_leave_us_with_just_the_last_char() {
let io = TestCharIO::new();
io.puts("fu!");
assert!(io.get_last_char() == '!');
assert!(io.get_and_reset_putc_calls() == 3);
io.puts("\n\t");
assert!(io.get_last_char() == '\t');
assert!(io.get_and_reset_putc_calls() == 2);
}
}
|
{
break;
}
|
conditional_block
|
chario.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic char output trait.
use core::str::{Str, StrSlice};
use core::slice::{Slice, ImmutableSlice};
use core::collections::Collection;
use core::iter::range;
use core::mem::zeroed;
use lib::strconv;
/// CharIO provides an interface for outputting characters.
///
/// This trait implements the common functions to output strings and numbers,
/// requiring only one method: `putc`.
pub trait CharIO {
/// Outputs a character.
fn putc(&self, value: char);
/// Outputs a string.
fn puts(&self, s: &str) {
let chars : &[u8] = s.as_slice().as_bytes();
for i in range(0, s.len()) {
let c : char = chars[i] as char;
self.putc(c);
}
}
/// Outputs an integer with given base.
fn putint(&self, i: u32, base: u32) {
        let mut buf : [u8, ..32] = unsafe { zeroed() };
let bsl : &mut [u8] = buf;
strconv::itoa(i, bsl, base);
for &i in bsl.iter() {
if i == 0 {
break;
}
self.putc(i as char);
}
}
/// Outputs an integer.
fn puti(&self, i: u32) {
self.putint(i, 10);
}
/// Outputs an integer as a hex string.
fn puth(&self, i: u32) {
self.putint(i, 16);
}
}
#[cfg(test)]
pub mod test {
use core::cell::RefCell;
use drivers::chario::CharIO;
pub struct TestCharIOData {
last_char: char,
putc_calls: uint,
}
pub struct TestCharIO {
data: RefCell<TestCharIOData>
}
impl CharIO for TestCharIO {
fn putc(&self, value: char) {
let mut data = self.data.borrow_mut();
data.putc_calls += 1;
|
pub fn new() -> TestCharIO {
TestCharIO {
data: RefCell::new(TestCharIOData {
last_char: '\0',
putc_calls: 0,
}),
}
}
fn get_last_char(&self) -> char {
self.data.borrow().last_char
}
fn get_and_reset_putc_calls(&self) -> uint {
let current = self.data.borrow().putc_calls;
self.data.borrow_mut().putc_calls = 0;
current
}
}
#[test]
fn putc_should_store_a_char() {
let io = TestCharIO::new();
io.putc('a');
assert!(io.get_last_char() == 'a');
io.putc('z');
assert!(io.get_last_char() == 'z');
}
#[test]
fn puti_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puti(3);
assert!(io.get_last_char() == '3');
io.puti(9);
assert!(io.get_last_char() == '9');
io.puti(10);
assert!(io.get_last_char() == '0');
io.puti(11);
assert!(io.get_last_char() == '1');
}
#[test]
fn puth_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puth(3);
assert!(io.get_last_char() == '3');
io.puth(9);
assert!(io.get_last_char() == '9');
io.puth(10);
assert!(io.get_last_char() == 'a');
io.puth(11);
assert!(io.get_last_char() == 'b');
io.puth(16);
assert!(io.get_last_char() == '0');
io.puth(17);
assert!(io.get_last_char() == '1');
}
#[test]
fn putint_should_work_with_different_bases() {
let io = TestCharIO::new();
io.putint(0, 2);
assert!(io.get_last_char() == '0');
io.putint(1, 2);
assert!(io.get_last_char() == '1');
io.putint(2, 2);
assert!(io.get_last_char() == '0');
io.putint(3, 2);
assert!(io.get_last_char() == '1');
io.putint(7, 7);
assert!(io.get_last_char() == '0');
io.putint(8, 7);
assert!(io.get_last_char() == '1');
io.putint(12, 7);
assert!(io.get_last_char() == '5');
io.putint(14, 7);
assert!(io.get_last_char() == '0');
}
#[test]
fn puts_should_leave_us_with_just_the_last_char() {
let io = TestCharIO::new();
io.puts("fu!");
assert!(io.get_last_char() == '!');
assert!(io.get_and_reset_putc_calls() == 3);
io.puts("\n\t");
assert!(io.get_last_char() == '\t');
assert!(io.get_and_reset_putc_calls() == 2);
}
}
|
data.last_char = value;
}
}
impl TestCharIO {
|
random_line_split
|
chario.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic char output trait.
use core::str::{Str, StrSlice};
use core::slice::{Slice, ImmutableSlice};
use core::collections::Collection;
use core::iter::range;
use core::mem::zeroed;
use lib::strconv;
/// CharIO provides an interface for outputting characters.
///
/// This trait implements the common functions to output strings and numbers,
/// requiring only one method: `putc`.
pub trait CharIO {
/// Outputs a character.
fn putc(&self, value: char);
/// Outputs a string.
fn puts(&self, s: &str) {
let chars : &[u8] = s.as_slice().as_bytes();
for i in range(0, s.len()) {
let c : char = chars[i] as char;
self.putc(c);
}
}
/// Outputs an integer with given base.
fn putint(&self, i: u32, base: u32) {
let mut buf : [u8,..32] = unsafe { zeroed() };
let bsl : &mut [u8] = buf;
strconv::itoa(i, bsl, base);
for &i in bsl.iter() {
if i == 0 {
break;
}
self.putc(i as char);
}
}
/// Outputs an integer.
fn puti(&self, i: u32) {
self.putint(i, 10);
}
/// Outputs an integer as a hex string.
fn puth(&self, i: u32) {
self.putint(i, 16);
}
}
#[cfg(test)]
pub mod test {
use core::cell::RefCell;
use drivers::chario::CharIO;
pub struct TestCharIOData {
last_char: char,
putc_calls: uint,
}
pub struct TestCharIO {
data: RefCell<TestCharIOData>
}
impl CharIO for TestCharIO {
fn putc(&self, value: char) {
let mut data = self.data.borrow_mut();
data.putc_calls += 1;
data.last_char = value;
}
}
impl TestCharIO {
pub fn new() -> TestCharIO {
TestCharIO {
data: RefCell::new(TestCharIOData {
last_char: '\0',
putc_calls: 0,
}),
}
}
fn get_last_char(&self) -> char {
self.data.borrow().last_char
}
fn get_and_reset_putc_calls(&self) -> uint {
let current = self.data.borrow().putc_calls;
self.data.borrow_mut().putc_calls = 0;
current
}
}
#[test]
fn putc_should_store_a_char()
|
#[test]
fn puti_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puti(3);
assert!(io.get_last_char() == '3');
io.puti(9);
assert!(io.get_last_char() == '9');
io.puti(10);
assert!(io.get_last_char() == '0');
io.puti(11);
assert!(io.get_last_char() == '1');
}
#[test]
fn puth_should_store_a_number_as_char() {
let io = TestCharIO::new();
io.puth(3);
assert!(io.get_last_char() == '3');
io.puth(9);
assert!(io.get_last_char() == '9');
io.puth(10);
assert!(io.get_last_char() == 'a');
io.puth(11);
assert!(io.get_last_char() == 'b');
io.puth(16);
assert!(io.get_last_char() == '0');
io.puth(17);
assert!(io.get_last_char() == '1');
}
#[test]
fn putint_should_work_with_different_bases() {
let io = TestCharIO::new();
io.putint(0, 2);
assert!(io.get_last_char() == '0');
io.putint(1, 2);
assert!(io.get_last_char() == '1');
io.putint(2, 2);
assert!(io.get_last_char() == '0');
io.putint(3, 2);
assert!(io.get_last_char() == '1');
io.putint(7, 7);
assert!(io.get_last_char() == '0');
io.putint(8, 7);
assert!(io.get_last_char() == '1');
io.putint(12, 7);
assert!(io.get_last_char() == '5');
io.putint(14, 7);
assert!(io.get_last_char() == '0');
}
#[test]
fn puts_should_leave_us_with_just_the_last_char() {
let io = TestCharIO::new();
io.puts("fu!");
assert!(io.get_last_char() == '!');
assert!(io.get_and_reset_putc_calls() == 3);
io.puts("\n\t");
assert!(io.get_last_char() == '\t');
assert!(io.get_and_reset_putc_calls() == 2);
}
}
|
{
let io = TestCharIO::new();
io.putc('a');
assert!(io.get_last_char() == 'a');
io.putc('z');
assert!(io.get_last_char() == 'z');
}
|
identifier_body
|
dst-index.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that overloaded index expressions with DST result types
// can't be used as rvalues
use std::ops::Index;
use std::fmt::Show;
struct S;
impl Copy for S {}
impl Index<uint, str> for S {
fn index<'a>(&'a self, _: &uint) -> &'a str
|
}
struct T;
impl Copy for T {}
impl Index<uint, Show +'static> for T {
fn index<'a>(&'a self, idx: &uint) -> &'a (Show +'static) {
static x: uint = 42;
&x
}
}
fn main() {
S[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
T[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
}
|
{
"hello"
}
|
identifier_body
|
dst-index.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that overloaded index expressions with DST result types
// can't be used as rvalues
use std::ops::Index;
use std::fmt::Show;
struct S;
impl Copy for S {}
impl Index<uint, str> for S {
fn index<'a>(&'a self, _: &uint) -> &'a str {
"hello"
}
}
|
impl Copy for T {}
impl Index<uint, Show +'static> for T {
fn index<'a>(&'a self, idx: &uint) -> &'a (Show +'static) {
static x: uint = 42;
&x
}
}
fn main() {
S[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
T[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
}
|
struct T;
|
random_line_split
|
dst-index.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that overloaded index expressions with DST result types
// can't be used as rvalues
use std::ops::Index;
use std::fmt::Show;
struct S;
impl Copy for S {}
impl Index<uint, str> for S {
fn
|
<'a>(&'a self, _: &uint) -> &'a str {
"hello"
}
}
struct T;
impl Copy for T {}
impl Index<uint, Show +'static> for T {
fn index<'a>(&'a self, idx: &uint) -> &'a (Show +'static) {
static x: uint = 42;
&x
}
}
fn main() {
S[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
T[0];
//~^ ERROR cannot move out of dereference
//~^^ ERROR E0161
}
|
index
|
identifier_name
|
status.rs
|
// Copyright 2017 LambdaStack All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatus {
health: CephStatusHealth,
fsid: String,
election_epoch: u32,
quorum: Vec<u32>,
quorum_names: Vec<String>,
monmap: CephStatusMonMap,
osdmap: CephStatusOSDMapH,
pgmap: CephStatusPGMap,
mdsmap: CephStatusMDSMap,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealth {
health: CephStatusHealth2,
timechecks: CephStatusHealthTimeChecks,
summary: Vec<CephStatusHealthSummary>,
overall_status: String,
detail: Vec<CephStatusHealthDetail>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealth2 {
health: Vec<CephStatusHealthServices>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServices {
mons: Vec<CephStatusHealthServicesMon>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServicesMon {
name: String,
kb_total: u32,
kb_used: u32,
kb_avail: u32,
avail_percent: u16,
last_updated: String,
store_stats: CephStatusHealthServicesMonStats,
health: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServicesMonStats {
bytes_total: u64,
bytes_sst: u64,
bytes_log: u64,
bytes_misc: u64,
last_updated: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthTimeChecks {
epoch: u32,
round: u32,
round_status: String,
mons: Vec<CephStatusHealthMons>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthMons {
name: String,
skew: f32,
latency: f32,
health: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct
|
{
severity: String,
summary: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthDetail {
dummy: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMonMap {
epoch: u32,
fsid: String,
modified: String,
created: String,
mons: Vec<CephStatusMonRank>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMonRank {
rank: u16,
name: String,
addr: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusOSDMapH {
osdmap: CephStatusOSDMapL,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusOSDMapL {
epoch: u32,
num_osds: u32,
num_up_osds: u32,
num_in_osds: u32,
full: bool,
nearfull: bool,
num_remapped_pgs: u32,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusPGMap {
pgs_by_state: Vec<CephStatusPGState>,
version: u32,
num_pgs: u32,
data_bytes: u64,
bytes_used: u64,
bytes_avail: u64,
bytes_total: u64,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusPGState {
state_name: String,
count: u32,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMDSMap {
epoch: u32,
up: u32,
_in: u32,
max: u32,
by_rank: Vec<CephStatusMDSRank>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMDSRank {
rank: u16,
name: String,
addr: String,
}
|
CephStatusHealthSummary
|
identifier_name
|
status.rs
|
// Copyright 2017 LambdaStack All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatus {
health: CephStatusHealth,
fsid: String,
election_epoch: u32,
quorum: Vec<u32>,
quorum_names: Vec<String>,
monmap: CephStatusMonMap,
osdmap: CephStatusOSDMapH,
pgmap: CephStatusPGMap,
mdsmap: CephStatusMDSMap,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealth {
health: CephStatusHealth2,
timechecks: CephStatusHealthTimeChecks,
summary: Vec<CephStatusHealthSummary>,
overall_status: String,
detail: Vec<CephStatusHealthDetail>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealth2 {
health: Vec<CephStatusHealthServices>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServices {
mons: Vec<CephStatusHealthServicesMon>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServicesMon {
name: String,
kb_total: u32,
kb_used: u32,
kb_avail: u32,
avail_percent: u16,
last_updated: String,
store_stats: CephStatusHealthServicesMonStats,
health: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthServicesMonStats {
bytes_total: u64,
bytes_sst: u64,
bytes_log: u64,
bytes_misc: u64,
last_updated: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthTimeChecks {
epoch: u32,
round: u32,
round_status: String,
mons: Vec<CephStatusHealthMons>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthMons {
name: String,
skew: f32,
latency: f32,
health: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthSummary {
severity: String,
summary: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusHealthDetail {
dummy: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMonMap {
epoch: u32,
fsid: String,
modified: String,
|
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMonRank {
rank: u16,
name: String,
addr: String,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusOSDMapH {
osdmap: CephStatusOSDMapL,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusOSDMapL {
epoch: u32,
num_osds: u32,
num_up_osds: u32,
num_in_osds: u32,
full: bool,
nearfull: bool,
num_remapped_pgs: u32,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusPGMap {
pgs_by_state: Vec<CephStatusPGState>,
version: u32,
num_pgs: u32,
data_bytes: u64,
bytes_used: u64,
bytes_avail: u64,
bytes_total: u64,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusPGState {
state_name: String,
count: u32,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMDSMap {
epoch: u32,
up: u32,
_in: u32,
max: u32,
by_rank: Vec<CephStatusMDSRank>,
}
#[derive(RustcDecodable, RustcEncodable)]
pub struct CephStatusMDSRank {
rank: u16,
name: String,
addr: String,
}
|
created: String,
mons: Vec<CephStatusMonRank>,
|
random_line_split
|
mut_from_ref.rs
|
#![allow(unused)]
#![warn(clippy::mut_from_ref)]
struct Foo;
impl Foo {
fn this_wont_hurt_a_bit(&self) -> &mut Foo {
unimplemented!()
}
}
trait Ouch {
fn ouch(x: &Foo) -> &mut Foo;
}
impl Ouch for Foo {
fn ouch(x: &Foo) -> &mut Foo {
unimplemented!()
}
}
fn fail(x: &u32) -> &mut u16 {
unimplemented!()
}
fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
unimplemented!()
}
fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is OK, because the result borrows y
fn works<'a>(x: &u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is also OK, because the result could borrow y
fn also_works<'a>(x: &'a u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
fn main()
|
{
//TODO
}
|
identifier_body
|
|
mut_from_ref.rs
|
#![allow(unused)]
#![warn(clippy::mut_from_ref)]
struct Foo;
impl Foo {
fn this_wont_hurt_a_bit(&self) -> &mut Foo {
unimplemented!()
}
}
trait Ouch {
fn ouch(x: &Foo) -> &mut Foo;
}
|
impl Ouch for Foo {
fn ouch(x: &Foo) -> &mut Foo {
unimplemented!()
}
}
fn fail(x: &u32) -> &mut u16 {
unimplemented!()
}
fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
unimplemented!()
}
fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is OK, because the result borrows y
fn works<'a>(x: &u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is also OK, because the result could borrow y
fn also_works<'a>(x: &'a u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
fn main() {
//TODO
}
|
random_line_split
|
|
mut_from_ref.rs
|
#![allow(unused)]
#![warn(clippy::mut_from_ref)]
struct
|
;
impl Foo {
fn this_wont_hurt_a_bit(&self) -> &mut Foo {
unimplemented!()
}
}
trait Ouch {
fn ouch(x: &Foo) -> &mut Foo;
}
impl Ouch for Foo {
fn ouch(x: &Foo) -> &mut Foo {
unimplemented!()
}
}
fn fail(x: &u32) -> &mut u16 {
unimplemented!()
}
fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
unimplemented!()
}
fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is OK, because the result borrows y
fn works<'a>(x: &u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
// this is also OK, because the result could borrow y
fn also_works<'a>(x: &'a u32, y: &'a mut u32) -> &'a mut u32 {
unimplemented!()
}
fn main() {
//TODO
}
|
Foo
|
identifier_name
|
issue-3563-3.rs
|
// run-pass
#![allow(unused_imports)]
#![allow(non_snake_case)]
// ASCII art shape renderer. Demonstrates traits, impls, operator overloading,
// non-copyable struct, unit testing. To run execute: rustc --test shapes.rs &&
//./shapes
// Rust's std library is tightly bound to the language itself so it is
// automatically linked in. However the extra library is designed to be
// optional (for code that must run on constrained environments like embedded
// devices or special environments like kernel code) so it must be explicitly
// linked in.
// Extern mod controls linkage. Use controls the visibility of names to modules
// that are already linked in. Using WriterUtil allows us to use the write_line
// method.
use std::fmt;
use std::iter::repeat;
use std::slice;
// Represents a position on a canvas.
#[derive(Copy, Clone)]
struct Point {
x: isize,
y: isize,
}
// Represents an offset on a canvas. (This has the same structure as a Point,
// but different semantics).
#[derive(Copy, Clone)]
struct Size {
width: isize,
height: isize,
}
#[derive(Copy, Clone)]
struct Rect {
top_left: Point,
size: Size,
}
// Contains the information needed to do shape rendering via ASCII art.
struct AsciiArt {
width: usize,
height: usize,
fill: char,
lines: Vec<Vec<char> >,
// This struct can be quite large so we'll disable copying: developers need
// to either pass these structs around via references or move them.
}
impl Drop for AsciiArt {
fn drop(&mut self) {}
}
// It's common to define a constructor sort of function to create struct instances.
// If there is a canonical constructor it is typically named the same as the type.
// Other constructor sort of functions are typically named from_foo, from_bar, etc.
fn AsciiArt(width: usize, height: usize, fill: char) -> AsciiArt {
// Build a vector of vectors containing blank characters for each position in
// our canvas.
let lines = vec![vec!['.'; width]; height];
// Rust code often returns values by omitting the trailing semi-colon
// instead of using an explicit return statement.
AsciiArt {width: width, height: height, fill: fill, lines: lines}
}
// Methods particular to the AsciiArt struct.
impl AsciiArt {
fn add_pt(&mut self, x: isize, y: isize) {
if x >= 0 && x < self.width as isize {
if y >= 0 && y < self.height as isize {
// Note that numeric types don't implicitly convert to each other.
let v = y as usize;
let h = x as usize;
// Vector subscripting will normally copy the element, but &v[i]
// will return a reference which is what we need because the
// element is:
// 1) potentially large
// 2) needs to be modified
let row = &mut self.lines[v];
row[h] = self.fill;
}
}
}
}
// Allows AsciiArt to be converted to a string using the libcore ToString trait.
// Note that the %s fmt! specifier will not call this automatically.
impl fmt::Display for AsciiArt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Convert each line into a string.
let lines = self.lines.iter()
.map(|line| line.iter().cloned().collect())
.collect::<Vec<String>>();
// Concatenate the lines together using a new-line.
write!(f, "{}", lines.join("\n"))
}
}
// This is similar to an interface in other languages: it defines a protocol which
// developers can implement for arbitrary concrete types.
trait Canvas {
fn add_point(&mut self, shape: Point);
fn add_rect(&mut self, shape: Rect);
// Unlike interfaces traits support default implementations.
// Got an ICE as soon as I added this method.
fn add_points(&mut self, shapes: &[Point]) {
for pt in shapes {self.add_point(*pt)};
}
}
// Here we provide an implementation of the Canvas methods for AsciiArt.
// Other implementations could also be provided (e.g., for PDF or Apple's Quartz)
// and code can use them polymorphically via the Canvas trait.
impl Canvas for AsciiArt {
fn add_point(&mut self, shape: Point) {
self.add_pt(shape.x, shape.y);
}
fn add_rect(&mut self, shape: Rect) {
// Add the top and bottom lines.
for x in shape.top_left.x..shape.top_left.x + shape.size.width {
self.add_pt(x, shape.top_left.y);
self.add_pt(x, shape.top_left.y + shape.size.height - 1);
}
// Add the left and right lines.
for y in shape.top_left.y..shape.top_left.y + shape.size.height {
self.add_pt(shape.top_left.x, y);
self.add_pt(shape.top_left.x + shape.size.width - 1, y);
}
}
}
// Rust's unit testing framework is currently a bit under-developed so we'll use
// this little helper.
pub fn check_strs(actual: &str, expected: &str) -> bool {
    if actual != expected
|
return true;
}
fn test_ascii_art_ctor() {
let art = AsciiArt(3, 3, '*');
assert!(check_strs(&art.to_string(), "...\n...\n..."));
}
fn test_add_pt() {
let mut art = AsciiArt(3, 3, '*');
art.add_pt(0, 0);
art.add_pt(0, -10);
art.add_pt(1, 2);
assert!(check_strs(&art.to_string(), "*..\n...\n.*."));
}
fn test_shapes() {
let mut art = AsciiArt(4, 4, '*');
art.add_rect(Rect {top_left: Point {x: 0, y: 0}, size: Size {width: 4, height: 4}});
art.add_point(Point {x: 2, y: 2});
assert!(check_strs(&art.to_string(), "****\n*..*\n*.**\n****"));
}
pub fn main() {
test_ascii_art_ctor();
test_add_pt();
test_shapes();
}
|
{
println!("Found:\n{}\nbut expected\n{}", actual, expected);
return false;
}
|
conditional_block
|
issue-3563-3.rs
|
// run-pass
#![allow(unused_imports)]
#![allow(non_snake_case)]
// ASCII art shape renderer. Demonstrates traits, impls, operator overloading,
// non-copyable struct, unit testing. To run execute: rustc --test shapes.rs &&
//./shapes
// Rust's std library is tightly bound to the language itself so it is
// automatically linked in. However the extra library is designed to be
// optional (for code that must run on constrained environments like embedded
// devices or special environments like kernel code) so it must be explicitly
// linked in.
// Extern mod controls linkage. Use controls the visibility of names to modules
// that are already linked in. Using WriterUtil allows us to use the write_line
// method.
use std::fmt;
use std::iter::repeat;
use std::slice;
// Represents a position on a canvas.
#[derive(Copy, Clone)]
struct Point {
x: isize,
y: isize,
}
// Represents an offset on a canvas. (This has the same structure as a Point,
// but different semantics).
#[derive(Copy, Clone)]
struct Size {
width: isize,
height: isize,
}
#[derive(Copy, Clone)]
struct Rect {
top_left: Point,
size: Size,
}
// Contains the information needed to do shape rendering via ASCII art.
struct AsciiArt {
width: usize,
|
height: usize,
fill: char,
lines: Vec<Vec<char> >,
// This struct can be quite large so we'll disable copying: developers need
// to either pass these structs around via references or move them.
}
impl Drop for AsciiArt {
fn drop(&mut self) {}
}
// It's common to define a constructor sort of function to create struct instances.
// If there is a canonical constructor it is typically named the same as the type.
// Other constructor sort of functions are typically named from_foo, from_bar, etc.
fn AsciiArt(width: usize, height: usize, fill: char) -> AsciiArt {
// Build a vector of vectors containing blank characters for each position in
// our canvas.
let lines = vec![vec!['.'; width]; height];
// Rust code often returns values by omitting the trailing semi-colon
// instead of using an explicit return statement.
AsciiArt {width: width, height: height, fill: fill, lines: lines}
}
// Methods particular to the AsciiArt struct.
impl AsciiArt {
fn add_pt(&mut self, x: isize, y: isize) {
if x >= 0 && x < self.width as isize {
if y >= 0 && y < self.height as isize {
// Note that numeric types don't implicitly convert to each other.
let v = y as usize;
let h = x as usize;
// Vector subscripting will normally copy the element, but &v[i]
// will return a reference which is what we need because the
// element is:
// 1) potentially large
// 2) needs to be modified
let row = &mut self.lines[v];
row[h] = self.fill;
}
}
}
}
// Allows AsciiArt to be converted to a string using the libcore ToString trait.
// Note that the %s fmt! specifier will not call this automatically.
impl fmt::Display for AsciiArt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Convert each line into a string.
let lines = self.lines.iter()
.map(|line| line.iter().cloned().collect())
.collect::<Vec<String>>();
// Concatenate the lines together using a new-line.
write!(f, "{}", lines.join("\n"))
}
}
// This is similar to an interface in other languages: it defines a protocol which
// developers can implement for arbitrary concrete types.
trait Canvas {
fn add_point(&mut self, shape: Point);
fn add_rect(&mut self, shape: Rect);
// Unlike interfaces traits support default implementations.
// Got an ICE as soon as I added this method.
fn add_points(&mut self, shapes: &[Point]) {
for pt in shapes {self.add_point(*pt)};
}
}
// Here we provide an implementation of the Canvas methods for AsciiArt.
// Other implementations could also be provided (e.g., for PDF or Apple's Quartz)
// and code can use them polymorphically via the Canvas trait.
impl Canvas for AsciiArt {
fn add_point(&mut self, shape: Point) {
self.add_pt(shape.x, shape.y);
}
fn add_rect(&mut self, shape: Rect) {
// Add the top and bottom lines.
for x in shape.top_left.x..shape.top_left.x + shape.size.width {
self.add_pt(x, shape.top_left.y);
self.add_pt(x, shape.top_left.y + shape.size.height - 1);
}
// Add the left and right lines.
for y in shape.top_left.y..shape.top_left.y + shape.size.height {
self.add_pt(shape.top_left.x, y);
self.add_pt(shape.top_left.x + shape.size.width - 1, y);
}
}
}
// Rust's unit testing framework is currently a bit under-developed so we'll use
// this little helper.
pub fn check_strs(actual: &str, expected: &str) -> bool {
    if actual != expected {
println!("Found:\n{}\nbut expected\n{}", actual, expected);
return false;
}
return true;
}
fn test_ascii_art_ctor() {
let art = AsciiArt(3, 3, '*');
assert!(check_strs(&art.to_string(), "...\n...\n..."));
}
fn test_add_pt() {
let mut art = AsciiArt(3, 3, '*');
art.add_pt(0, 0);
art.add_pt(0, -10);
art.add_pt(1, 2);
assert!(check_strs(&art.to_string(), "*..\n...\n.*."));
}
fn test_shapes() {
let mut art = AsciiArt(4, 4, '*');
art.add_rect(Rect {top_left: Point {x: 0, y: 0}, size: Size {width: 4, height: 4}});
art.add_point(Point {x: 2, y: 2});
assert!(check_strs(&art.to_string(), "****\n*..*\n*.**\n****"));
}
pub fn main() {
test_ascii_art_ctor();
test_add_pt();
test_shapes();
}
|
random_line_split
|
|
issue-3563-3.rs
|
// run-pass
#![allow(unused_imports)]
#![allow(non_snake_case)]
// ASCII art shape renderer. Demonstrates traits, impls, operator overloading,
// non-copyable struct, unit testing. To run execute: rustc --test shapes.rs &&
//./shapes
// Rust's std library is tightly bound to the language itself so it is
// automatically linked in. However the extra library is designed to be
// optional (for code that must run on constrained environments like embedded
// devices or special environments like kernel code) so it must be explicitly
// linked in.
// Extern mod controls linkage. Use controls the visibility of names to modules
// that are already linked in. Using WriterUtil allows us to use the write_line
// method.
use std::fmt;
use std::iter::repeat;
use std::slice;
// Represents a position on a canvas.
#[derive(Copy, Clone)]
struct Point {
x: isize,
y: isize,
}
// Represents an offset on a canvas. (This has the same structure as a Point,
// but different semantics).
#[derive(Copy, Clone)]
struct Size {
width: isize,
height: isize,
}
#[derive(Copy, Clone)]
struct Rect {
top_left: Point,
size: Size,
}
// Contains the information needed to do shape rendering via ASCII art.
struct AsciiArt {
width: usize,
height: usize,
fill: char,
lines: Vec<Vec<char> >,
// This struct can be quite large so we'll disable copying: developers need
// to either pass these structs around via references or move them.
}
impl Drop for AsciiArt {
fn drop(&mut self) {}
}
// It's common to define a constructor sort of function to create struct instances.
// If there is a canonical constructor it is typically named the same as the type.
// Other constructor sort of functions are typically named from_foo, from_bar, etc.
fn AsciiArt(width: usize, height: usize, fill: char) -> AsciiArt {
// Build a vector of vectors containing blank characters for each position in
// our canvas.
let lines = vec![vec!['.'; width]; height];
// Rust code often returns values by omitting the trailing semi-colon
// instead of using an explicit return statement.
AsciiArt {width: width, height: height, fill: fill, lines: lines}
}
// Methods particular to the AsciiArt struct.
impl AsciiArt {
fn add_pt(&mut self, x: isize, y: isize) {
if x >= 0 && x < self.width as isize {
if y >= 0 && y < self.height as isize {
// Note that numeric types don't implicitly convert to each other.
let v = y as usize;
let h = x as usize;
// Vector subscripting will normally copy the element, but &v[i]
// will return a reference which is what we need because the
// element is:
// 1) potentially large
// 2) needs to be modified
let row = &mut self.lines[v];
row[h] = self.fill;
}
}
}
}
// Allows AsciiArt to be converted to a string using the libcore ToString trait.
// Note that the %s fmt! specifier will not call this automatically.
impl fmt::Display for AsciiArt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Convert each line into a string.
let lines = self.lines.iter()
.map(|line| line.iter().cloned().collect())
.collect::<Vec<String>>();
// Concatenate the lines together using a new-line.
write!(f, "{}", lines.join("\n"))
}
}
// This is similar to an interface in other languages: it defines a protocol which
// developers can implement for arbitrary concrete types.
trait Canvas {
fn add_point(&mut self, shape: Point);
fn add_rect(&mut self, shape: Rect);
// Unlike interfaces traits support default implementations.
// Got an ICE as soon as I added this method.
fn add_points(&mut self, shapes: &[Point]) {
for pt in shapes {self.add_point(*pt)};
}
}
// Here we provide an implementation of the Canvas methods for AsciiArt.
// Other implementations could also be provided (e.g., for PDF or Apple's Quartz)
// and code can use them polymorphically via the Canvas trait.
impl Canvas for AsciiArt {
fn add_point(&mut self, shape: Point) {
self.add_pt(shape.x, shape.y);
}
fn
|
(&mut self, shape: Rect) {
// Add the top and bottom lines.
for x in shape.top_left.x..shape.top_left.x + shape.size.width {
self.add_pt(x, shape.top_left.y);
self.add_pt(x, shape.top_left.y + shape.size.height - 1);
}
// Add the left and right lines.
for y in shape.top_left.y..shape.top_left.y + shape.size.height {
self.add_pt(shape.top_left.x, y);
self.add_pt(shape.top_left.x + shape.size.width - 1, y);
}
}
}
// Rust's unit testing framework is currently a bit under-developed so we'll use
// this little helper.
pub fn check_strs(actual: &str, expected: &str) -> bool {
    if actual != expected {
println!("Found:\n{}\nbut expected\n{}", actual, expected);
return false;
}
return true;
}
fn test_ascii_art_ctor() {
let art = AsciiArt(3, 3, '*');
assert!(check_strs(&art.to_string(), "...\n...\n..."));
}
fn test_add_pt() {
let mut art = AsciiArt(3, 3, '*');
art.add_pt(0, 0);
art.add_pt(0, -10);
art.add_pt(1, 2);
assert!(check_strs(&art.to_string(), "*..\n...\n.*."));
}
fn test_shapes() {
let mut art = AsciiArt(4, 4, '*');
art.add_rect(Rect {top_left: Point {x: 0, y: 0}, size: Size {width: 4, height: 4}});
art.add_point(Point {x: 2, y: 2});
assert!(check_strs(&art.to_string(), "****\n*..*\n*.**\n****"));
}
pub fn main() {
test_ascii_art_ctor();
test_add_pt();
test_shapes();
}
|
add_rect
|
identifier_name
|
room_key_request.rs
|
//! Types for the [`m.room_key_request`] event.
//!
//! [`m.room_key_request`]: https://spec.matrix.org/v1.2/client-server-api/#mroom_key_request
use ruma_macros::EventContent;
use ruma_serde::StringEnum;
use serde::{Deserialize, Serialize};
use crate::{DeviceId, EventEncryptionAlgorithm, PrivOwnedStr, RoomId, TransactionId};
/// The content of an `m.room_key_request` event.
#[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
#[ruma_event(type = "m.room_key_request", kind = ToDevice)]
pub struct ToDeviceRoomKeyRequestEventContent {
/// Whether this is a new key request or a cancellation of a previous request.
pub action: Action,
/// Information about the requested key.
///
/// Required if action is `request`.
pub body: Option<RequestedKeyInfo>,
/// ID of the device requesting the key.
pub requesting_device_id: Box<DeviceId>,
/// A random string uniquely identifying the request for a key.
///
    /// If the key is requested multiple times, it should be reused. It should also be reused
/// in order to cancel a request.
pub request_id: Box<TransactionId>,
}
impl ToDeviceRoomKeyRequestEventContent {
    /// Creates a new `ToDeviceRoomKeyRequestEventContent` with the given action, body, device ID
/// and request ID.
pub fn new(
action: Action,
body: Option<RequestedKeyInfo>,
requesting_device_id: Box<DeviceId>,
request_id: Box<TransactionId>,
) -> Self {
Self { action, body, requesting_device_id, request_id }
}
}
/// A new key request or a cancellation of a previous request.
///
/// This type can hold an arbitrary string. To check for formats that are not available as a
/// documented variant here, use its string representation, obtained through `.as_str()`.
#[derive(Clone, Debug, PartialEq, Eq, StringEnum)]
#[ruma_enum(rename_all = "snake_case")]
#[non_exhaustive]
pub enum Action {
/// Request a key.
Request,
/// Cancel a request for a key.
#[ruma_enum(rename = "request_cancellation")]
CancelRequest,
#[doc(hidden)]
_Custom(PrivOwnedStr),
}
impl Action {
/// Creates a string slice from this `Action`.
pub fn as_str(&self) -> &str {
self.as_ref()
}
}
/// Information about a requested key.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
pub struct RequestedKeyInfo {
/// The encryption algorithm the requested key in this event is to be used with.
pub algorithm: EventEncryptionAlgorithm,
/// The room where the key is used.
pub room_id: Box<RoomId>,
/// The Curve25519 key of the device which initiated the session originally.
pub sender_key: String,
/// The ID of the session that the key is for.
pub session_id: String,
}
impl RequestedKeyInfo {
/// Creates a new `RequestedKeyInfo` with the given algorithm, room ID, sender key and session
/// ID.
pub fn new(
algorithm: EventEncryptionAlgorithm,
room_id: Box<RoomId>,
sender_key: String,
session_id: String,
) -> Self {
Self { algorithm, room_id, sender_key, session_id }
}
|
}
|
random_line_split
|
|
room_key_request.rs
|
//! Types for the [`m.room_key_request`] event.
//!
//! [`m.room_key_request`]: https://spec.matrix.org/v1.2/client-server-api/#mroom_key_request
use ruma_macros::EventContent;
use ruma_serde::StringEnum;
use serde::{Deserialize, Serialize};
use crate::{DeviceId, EventEncryptionAlgorithm, PrivOwnedStr, RoomId, TransactionId};
/// The content of an `m.room_key_request` event.
#[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
#[ruma_event(type = "m.room_key_request", kind = ToDevice)]
pub struct ToDeviceRoomKeyRequestEventContent {
/// Whether this is a new key request or a cancellation of a previous request.
pub action: Action,
/// Information about the requested key.
///
/// Required if action is `request`.
pub body: Option<RequestedKeyInfo>,
/// ID of the device requesting the key.
pub requesting_device_id: Box<DeviceId>,
/// A random string uniquely identifying the request for a key.
///
    /// If the key is requested multiple times, it should be reused. It should also be reused
/// in order to cancel a request.
pub request_id: Box<TransactionId>,
}
impl ToDeviceRoomKeyRequestEventContent {
    /// Creates a new `ToDeviceRoomKeyRequestEventContent` with the given action, body, device ID
/// and request ID.
pub fn new(
action: Action,
body: Option<RequestedKeyInfo>,
requesting_device_id: Box<DeviceId>,
request_id: Box<TransactionId>,
) -> Self {
Self { action, body, requesting_device_id, request_id }
}
}
/// A new key request or a cancellation of a previous request.
///
/// This type can hold an arbitrary string. To check for formats that are not available as a
/// documented variant here, use its string representation, obtained through `.as_str()`.
#[derive(Clone, Debug, PartialEq, Eq, StringEnum)]
#[ruma_enum(rename_all = "snake_case")]
#[non_exhaustive]
pub enum Action {
/// Request a key.
Request,
/// Cancel a request for a key.
#[ruma_enum(rename = "request_cancellation")]
CancelRequest,
#[doc(hidden)]
_Custom(PrivOwnedStr),
}
impl Action {
/// Creates a string slice from this `Action`.
pub fn
|
(&self) -> &str {
self.as_ref()
}
}
/// Information about a requested key.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
pub struct RequestedKeyInfo {
/// The encryption algorithm the requested key in this event is to be used with.
pub algorithm: EventEncryptionAlgorithm,
/// The room where the key is used.
pub room_id: Box<RoomId>,
/// The Curve25519 key of the device which initiated the session originally.
pub sender_key: String,
/// The ID of the session that the key is for.
pub session_id: String,
}
impl RequestedKeyInfo {
/// Creates a new `RequestedKeyInfo` with the given algorithm, room ID, sender key and session
/// ID.
pub fn new(
algorithm: EventEncryptionAlgorithm,
room_id: Box<RoomId>,
sender_key: String,
session_id: String,
) -> Self {
Self { algorithm, room_id, sender_key, session_id }
}
}
|
as_str
|
identifier_name
|
restoration_status.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
|
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Restoration status type definition
/// Statuses for restorations.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", binary)]
pub enum RestorationStatus {
/// No restoration.
Inactive,
/// Ongoing restoration.
Ongoing {
/// Total number of state chunks.
state_chunks: u32,
/// Total number of block chunks.
block_chunks: u32,
/// Number of state chunks completed.
state_chunks_done: u32,
/// Number of block chunks completed.
block_chunks_done: u32,
},
/// Failed restoration.
Failed,
}
|
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
|
random_line_split
|
restoration_status.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Restoration status type definition
/// Statuses for restorations.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
#[cfg_attr(feature = "ipc", binary)]
pub enum
|
{
/// No restoration.
Inactive,
/// Ongoing restoration.
Ongoing {
/// Total number of state chunks.
state_chunks: u32,
/// Total number of block chunks.
block_chunks: u32,
/// Number of state chunks completed.
state_chunks_done: u32,
/// Number of block chunks completed.
block_chunks_done: u32,
},
/// Failed restoration.
Failed,
}
|
RestorationStatus
|
identifier_name
|
motion.rs
|
use super::*;
impl Editor {
    /// Convert an instruction to a motion (new coordinate). Returns None if the instruction given
    /// is either invalid or has no movement.
///
/// A motion is a namespace (i.e. non mode-specific set of commands), which represents
    /// movements. These are useful for commands which take a motion as a post-parameter, such as d.
    /// d deletes the text given by the following motion. Other commands can make use of motions,
/// using this method.
pub fn
|
(&mut self, Inst(n, cmd): Inst) -> Option<(usize, usize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left(n.d())),
Char('l') => Some(self.right(n.d())),
Char('j') => Some(self.down(n.d())),
Char('k') => Some(self.up(n.d())),
Char('g') => Some((0, n.or(1) - 1)),
Char('G') => Some((0, self.text.len() - 1)),
Char('L') => Some(self.ln_end()),
Char('H') => Some((0, self.y())),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char(c) => {
self.status_bar.msg = format!("Motion not defined: '{}'", c);
self.redraw_status_bar();
None
},
_ => {
self.status_bar.msg = format!("Motion not defined");
None
},
}
}
    /// Like to_motion(), but does not bound the result to the text. Therefore it returns an isize, and in some
    /// cases it's a position which is out of bounds. This is useful when commands want to measure
/// the relative movement over the movement.
pub fn to_motion_unbounded(&mut self, Inst(n, cmd): Inst) -> Option<(isize, isize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left_unbounded(n.d())),
Char('l') => Some(self.right_unbounded(n.d())),
Char('j') => Some(self.down_unbounded(n.d())),
Char('k') => Some(self.up_unbounded(n.d())),
Char('g') => Some((0, n.or(1) as isize - 1)),
Char('G') => Some((0, self.text.len() as isize - 1)),
Char('L') => Some(to_signed_pos(self.ln_end())),
Char('H') => Some((0, self.y() as isize)),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
_ => None,
}
}
}
|
to_motion
|
identifier_name
|
motion.rs
|
use super::*;
impl Editor {
    /// Convert an instruction to a motion (new coordinate). Returns None if the instruction given
    /// is either invalid or has no movement.
///
/// A motion is a namespace (i.e. non mode-specific set of commands), which represents
    /// movements. These are useful for commands which take a motion as a post-parameter, such as d.
    /// d deletes the text given by the following motion. Other commands can make use of motions,
/// using this method.
pub fn to_motion(&mut self, Inst(n, cmd): Inst) -> Option<(usize, usize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left(n.d())),
Char('l') => Some(self.right(n.d())),
Char('j') => Some(self.down(n.d())),
Char('k') => Some(self.up(n.d())),
Char('g') => Some((0, n.or(1) - 1)),
Char('G') => Some((0, self.text.len() - 1)),
Char('L') => Some(self.ln_end()),
Char('H') => Some((0, self.y())),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(o)
} else
|
},
Char(c) => {
self.status_bar.msg = format!("Motion not defined: '{}'", c);
self.redraw_status_bar();
None
},
_ => {
self.status_bar.msg = format!("Motion not defined");
None
},
}
}
    /// Like to_motion(), but does not bound the result to the text. Therefore it returns an isize, and in some
    /// cases it's a position which is out of bounds. This is useful when commands want to measure
/// the relative movement over the movement.
pub fn to_motion_unbounded(&mut self, Inst(n, cmd): Inst) -> Option<(isize, isize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left_unbounded(n.d())),
Char('l') => Some(self.right_unbounded(n.d())),
Char('j') => Some(self.down_unbounded(n.d())),
Char('k') => Some(self.up_unbounded(n.d())),
Char('g') => Some((0, n.or(1) as isize - 1)),
Char('G') => Some((0, self.text.len() as isize - 1)),
Char('L') => Some(to_signed_pos(self.ln_end())),
Char('H') => Some((0, self.y() as isize)),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
_ => None,
}
}
}
|
{
None
}
|
conditional_block
|
motion.rs
|
    /// is either invalid or has no movement.
///
/// A motion is a namespace (i.e. non mode-specific set of commands), which represents
    /// movements. These are useful for commands which take a motion as a post-parameter, such as d.
    /// d deletes the text given by the following motion. Other commands can make use of motions,
/// using this method.
pub fn to_motion(&mut self, Inst(n, cmd): Inst) -> Option<(usize, usize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left(n.d())),
Char('l') => Some(self.right(n.d())),
Char('j') => Some(self.down(n.d())),
Char('k') => Some(self.up(n.d())),
Char('g') => Some((0, n.or(1) - 1)),
Char('G') => Some((0, self.text.len() - 1)),
Char('L') => Some(self.ln_end()),
Char('H') => Some((0, self.y())),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char(c) => {
self.status_bar.msg = format!("Motion not defined: '{}'", c);
self.redraw_status_bar();
None
},
_ => {
self.status_bar.msg = format!("Motion not defined");
None
},
}
}
    /// Like to_motion(), but does not bound the result to the text. Therefore it returns an isize, and in some
    /// cases it's a position which is out of bounds. This is useful when commands want to measure
/// the relative movement over the movement.
pub fn to_motion_unbounded(&mut self, Inst(n, cmd): Inst) -> Option<(isize, isize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left_unbounded(n.d())),
Char('l') => Some(self.right_unbounded(n.d())),
Char('j') => Some(self.down_unbounded(n.d())),
Char('k') => Some(self.up_unbounded(n.d())),
Char('g') => Some((0, n.or(1) as isize - 1)),
Char('G') => Some((0, self.text.len() as isize - 1)),
Char('L') => Some(to_signed_pos(self.ln_end())),
Char('H') => Some((0, self.y() as isize)),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
_ => None,
}
}
}
|
use super::*;
impl Editor {
    /// Convert an instruction to a motion (new coordinate). Returns None if the instruction given
|
random_line_split
|
|
motion.rs
|
use super::*;
impl Editor {
    /// Convert an instruction to a motion (new coordinate). Returns None if the instruction given
    /// is either invalid or has no movement.
///
/// A motion is a namespace (i.e. non mode-specific set of commands), which represents
    /// movements. These are useful for commands which take a motion as a post-parameter, such as d.
    /// d deletes the text given by the following motion. Other commands can make use of motions,
/// using this method.
pub fn to_motion(&mut self, Inst(n, cmd): Inst) -> Option<(usize, usize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left(n.d())),
Char('l') => Some(self.right(n.d())),
Char('j') => Some(self.down(n.d())),
Char('k') => Some(self.up(n.d())),
Char('g') => Some((0, n.or(1) - 1)),
Char('G') => Some((0, self.text.len() - 1)),
Char('L') => Some(self.ln_end()),
Char('H') => Some((0, self.y())),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char(c) => {
self.status_bar.msg = format!("Motion not defined: '{}'", c);
self.redraw_status_bar();
None
},
_ => {
self.status_bar.msg = format!("Motion not defined");
None
},
}
}
    /// Like to_motion(), but does not bound the result to the text. Therefore it returns an isize, and in some
    /// cases it's a position which is out of bounds. This is useful when commands want to measure
/// the relative movement over the movement.
pub fn to_motion_unbounded(&mut self, Inst(n, cmd): Inst) -> Option<(isize, isize)>
|
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
_ => None,
}
}
}
|
{
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left_unbounded(n.d())),
Char('l') => Some(self.right_unbounded(n.d())),
Char('j') => Some(self.down_unbounded(n.d())),
Char('k') => Some(self.up_unbounded(n.d())),
Char('g') => Some((0, n.or(1) as isize - 1)),
Char('G') => Some((0, self.text.len() as isize - 1)),
Char('L') => Some(to_signed_pos(self.ln_end())),
Char('H') => Some((0, self.y() as isize)),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
|
identifier_body
|
lib.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! # ConfigParser
//!
//! ConfigParser is a utility to parse hgrc-like config files.
//!
//! ## Features
//!
//! - Parse valid hgrc-like config files efficiently.
//! - Track source locations of config values. Keep multiple locations of
//!   the same config if it is overridden.
//!
//! ## Config Format
//!
//! hgrc files are similar to INI files:
//!
//! ```plain,ignore
//! [section1]
//! name1 = value1
//! name2 = value2
//!
//! [section2]
//! name3 = value3
//!
//! ; This is a comment.
//! # This is also a comment.
//! ```
//!
//! But with some additional features.
//!
//! ### Include other config files
//!
//! Use `%include` to include other config files:
//!
//! ```plain,ignore
//! %include path/to/another/hgrc
|
//! %include path/to/another/hgrc.d
//! ```
//!
//! The include path is relative to the directory of the current config
//! file being parsed. If it's a directory, files with names ending
//! with `.rc` in it will be read.
//!
//! ### Unset a config
//!
//! Use `%unset` to unset a config:
//!
//! ```plain,ignore
//! [section]
//! %unset name1
//! ```
//!
//! ### Multi-line values
//!
//! Indent non-first lines with a space:
//!
//! ```plain,ignore
//! [section]
//! name1 = value
//! line2
//! line3
//! ```
pub mod c_api;
pub mod config;
pub mod convert;
pub mod dynamicconfig;
pub mod error;
pub mod hg;
pub mod parser;
pub use error::{Error, Errors};
// Re-export
pub use minibytes::Text;
#[cfg(feature = "fb")]
mod fb;
#[cfg(test)]
use lazy_static::lazy_static;
#[cfg(test)]
use parking_lot::Mutex;
#[cfg(test)]
lazy_static! {
/// Lock for the environment. This should be acquired by tests that rely on particular
/// environment variable values that might be overwritten by other tests.
pub static ref ENV_LOCK: Mutex<()> = Mutex::new(());
}
|
random_line_split
|
|
llvm-asm-in-moved.rs
|
// run-pass
#![feature(llvm_asm)]
|
#[repr(C)]
struct NoisyDrop<'a>(&'a Cell<&'static str>);
impl<'a> Drop for NoisyDrop<'a> {
fn drop(&mut self) {
self.0.set("destroyed");
}
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
fn main() {
let status = Cell::new("alive");
{
let _y: Box<NoisyDrop>;
let x = Box::new(NoisyDrop(&status));
unsafe {
llvm_asm!("mov $1, $0" : "=r"(_y) : "r"(x));
}
assert_eq!(status.get(), "alive");
}
assert_eq!(status.get(), "destroyed");
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
fn main() {}
|
#![allow(deprecated)] // llvm_asm!
#![allow(dead_code)]
use std::cell::Cell;
|
random_line_split
|
llvm-asm-in-moved.rs
|
// run-pass
#![feature(llvm_asm)]
#![allow(deprecated)] // llvm_asm!
#![allow(dead_code)]
use std::cell::Cell;
#[repr(C)]
struct
|
<'a>(&'a Cell<&'static str>);
impl<'a> Drop for NoisyDrop<'a> {
fn drop(&mut self) {
self.0.set("destroyed");
}
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
fn main() {
let status = Cell::new("alive");
{
let _y: Box<NoisyDrop>;
let x = Box::new(NoisyDrop(&status));
unsafe {
llvm_asm!("mov $1, $0" : "=r"(_y) : "r"(x));
}
assert_eq!(status.get(), "alive");
}
assert_eq!(status.get(), "destroyed");
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
fn main() {}
|
NoisyDrop
|
identifier_name
|
llvm-asm-in-moved.rs
|
// run-pass
#![feature(llvm_asm)]
#![allow(deprecated)] // llvm_asm!
#![allow(dead_code)]
use std::cell::Cell;
#[repr(C)]
struct NoisyDrop<'a>(&'a Cell<&'static str>);
impl<'a> Drop for NoisyDrop<'a> {
fn drop(&mut self)
|
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
fn main() {
let status = Cell::new("alive");
{
let _y: Box<NoisyDrop>;
let x = Box::new(NoisyDrop(&status));
unsafe {
llvm_asm!("mov $1, $0" : "=r"(_y) : "r"(x));
}
assert_eq!(status.get(), "alive");
}
assert_eq!(status.get(), "destroyed");
}
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
fn main() {}
|
{
self.0.set("destroyed");
}
|
identifier_body
|
basic_shape.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! CSS handling for the [`basic-shape`](https://drafts.csswg.org/css-shapes/#typedef-basic-shape)
//! types that are generic over their `ToCss` implementations.
use crate::values::animated::{Animate, Procedure, ToAnimatedZero};
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::generics::border::BorderRadius;
use crate::values::generics::position::Position;
use crate::values::generics::rect::Rect;
use crate::values::specified::SVGPathData;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
/// A clipping shape, for `clip-path`.
|
#[allow(missing_docs)]
#[derive(
Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum GeometryBox {
FillBox,
StrokeBox,
ViewBox,
ShapeBox(ShapeBox),
}
/// A float area shape, for `shape-outside`.
pub type FloatAreaShape<BasicShape, Image> = ShapeSource<BasicShape, ShapeBox, Image>;
/// https://drafts.csswg.org/css-shapes-1/#typedef-shape-box
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Animate,
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeBox {
MarginBox,
BorderBox,
PaddingBox,
ContentBox,
}
/// A shape source, for some reference box.
#[allow(missing_docs)]
#[animation(no_bound(ImageOrUrl))]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum ShapeSource<BasicShape, ReferenceBox, ImageOrUrl> {
#[animation(error)]
ImageOrUrl(ImageOrUrl),
Shape(BasicShape, Option<ReferenceBox>),
#[animation(error)]
Box(ReferenceBox),
#[css(function)]
Path(Path),
#[animation(error)]
None,
}
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum BasicShape<H, V, LengthOrPercentage> {
Inset(#[css(field_bound)] InsetRect<LengthOrPercentage>),
Circle(#[css(field_bound)] Circle<H, V, LengthOrPercentage>),
Ellipse(#[css(field_bound)] Ellipse<H, V, LengthOrPercentage>),
Polygon(Polygon<LengthOrPercentage>),
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-inset>
#[allow(missing_docs)]
#[css(function = "inset")]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct InsetRect<LengthOrPercentage> {
pub rect: Rect<LengthOrPercentage>,
pub round: Option<BorderRadius<LengthOrPercentage>>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-circle>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Circle<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub radius: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-ellipse>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Ellipse<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub semiaxis_x: ShapeRadius<LengthOrPercentage>,
pub semiaxis_y: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#typedef-shape-radius>
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeRadius<LengthOrPercentage> {
Length(LengthOrPercentage),
#[animation(error)]
ClosestSide,
#[animation(error)]
FarthestSide,
}
/// A generic type for representing the `polygon()` function
///
/// <https://drafts.csswg.org/css-shapes/#funcdef-polygon>
#[css(comma, function)]
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct Polygon<LengthOrPercentage> {
/// The filling rule for a polygon.
#[css(skip_if = "fill_is_default")]
pub fill: FillRule,
/// A collection of (x, y) coordinates to draw the polygon.
#[css(iterable)]
pub coordinates: Vec<PolygonCoord<LengthOrPercentage>>,
}
/// Coordinates for Polygon.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct PolygonCoord<LengthOrPercentage>(pub LengthOrPercentage, pub LengthOrPercentage);
// https://drafts.csswg.org/css-shapes/#typedef-fill-rule
// NOTE: Basic shapes spec says that these are the only two values, however
// https://www.w3.org/TR/SVG/painting.html#FillRuleProperty
// says that it can also be `inherit`
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
#[repr(u8)]
pub enum FillRule {
Nonzero,
Evenodd,
}
/// The path function defined in css-shapes-2.
///
/// https://drafts.csswg.org/css-shapes-2/#funcdef-path
#[css(comma)]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub struct Path {
/// The filling rule for the svg path.
#[css(skip_if = "fill_is_default")]
#[animation(constant)]
pub fill: FillRule,
/// The svg path data.
pub path: SVGPathData,
}
// FIXME(nox): Implement ComputeSquaredDistance for T types and stop
// using PartialEq here, this will let us derive this impl.
impl<B, T, U> ComputeSquaredDistance for ShapeSource<B, T, U>
where
B: ComputeSquaredDistance,
T: PartialEq,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
match (self, other) {
(
&ShapeSource::Shape(ref this, ref this_box),
&ShapeSource::Shape(ref other, ref other_box),
)
if this_box == other_box =>
{
this.compute_squared_distance(other)
},
(&ShapeSource::Path(ref this), &ShapeSource::Path(ref other))
if this.fill == other.fill =>
{
this.path.compute_squared_distance(&other.path)
},
_ => Err(()),
}
}
}
impl<B, T, U> ToAnimatedZero for ShapeSource<B, T, U> {
fn to_animated_zero(&self) -> Result<Self, ()> {
Err(())
}
}
impl<L> ToCss for InsetRect<L>
where
L: ToCss + PartialEq,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str("inset(")?;
self.rect.to_css(dest)?;
if let Some(ref radius) = self.round {
dest.write_str(" round ")?;
radius.to_css(dest)?;
}
dest.write_str(")")
}
}
impl<L> Default for ShapeRadius<L> {
#[inline]
fn default() -> Self {
ShapeRadius::ClosestSide
}
}
impl<L> Animate for Polygon<L>
where
L: Animate,
{
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
let coordinates = self
.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
Ok(PolygonCoord(
this.0.animate(&other.0, procedure)?,
this.1.animate(&other.1, procedure)?,
))
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Polygon {
fill: self.fill,
coordinates,
})
}
}
impl<L> ComputeSquaredDistance for Polygon<L>
where
L: ComputeSquaredDistance,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
self.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
let d1 = this.0.compute_squared_distance(&other.0)?;
let d2 = this.1.compute_squared_distance(&other.1)?;
Ok(d1 + d2)
})
.sum()
}
}
impl Default for FillRule {
#[inline]
fn default() -> Self {
FillRule::Nonzero
}
}
#[inline]
fn fill_is_default(fill: &FillRule) -> bool {
*fill == FillRule::default()
}
|
pub type ClippingShape<BasicShape, Url> = ShapeSource<BasicShape, GeometryBox, Url>;
/// <https://drafts.fxtf.org/css-masking-1/#typedef-geometry-box>
|
random_line_split
|
basic_shape.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! CSS handling for the [`basic-shape`](https://drafts.csswg.org/css-shapes/#typedef-basic-shape)
//! types that are generic over their `ToCss` implementations.
use crate::values::animated::{Animate, Procedure, ToAnimatedZero};
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::generics::border::BorderRadius;
use crate::values::generics::position::Position;
use crate::values::generics::rect::Rect;
use crate::values::specified::SVGPathData;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
/// A clipping shape, for `clip-path`.
pub type ClippingShape<BasicShape, Url> = ShapeSource<BasicShape, GeometryBox, Url>;
/// <https://drafts.fxtf.org/css-masking-1/#typedef-geometry-box>
#[allow(missing_docs)]
#[derive(
Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum GeometryBox {
FillBox,
StrokeBox,
ViewBox,
ShapeBox(ShapeBox),
}
/// A float area shape, for `shape-outside`.
pub type FloatAreaShape<BasicShape, Image> = ShapeSource<BasicShape, ShapeBox, Image>;
/// https://drafts.csswg.org/css-shapes-1/#typedef-shape-box
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Animate,
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeBox {
MarginBox,
BorderBox,
PaddingBox,
ContentBox,
}
/// A shape source, for some reference box.
#[allow(missing_docs)]
#[animation(no_bound(ImageOrUrl))]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum ShapeSource<BasicShape, ReferenceBox, ImageOrUrl> {
#[animation(error)]
ImageOrUrl(ImageOrUrl),
Shape(BasicShape, Option<ReferenceBox>),
#[animation(error)]
Box(ReferenceBox),
#[css(function)]
Path(Path),
#[animation(error)]
None,
}
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum BasicShape<H, V, LengthOrPercentage> {
Inset(#[css(field_bound)] InsetRect<LengthOrPercentage>),
Circle(#[css(field_bound)] Circle<H, V, LengthOrPercentage>),
Ellipse(#[css(field_bound)] Ellipse<H, V, LengthOrPercentage>),
Polygon(Polygon<LengthOrPercentage>),
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-inset>
#[allow(missing_docs)]
#[css(function = "inset")]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct InsetRect<LengthOrPercentage> {
pub rect: Rect<LengthOrPercentage>,
pub round: Option<BorderRadius<LengthOrPercentage>>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-circle>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Circle<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub radius: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-ellipse>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Ellipse<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub semiaxis_x: ShapeRadius<LengthOrPercentage>,
pub semiaxis_y: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#typedef-shape-radius>
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeRadius<LengthOrPercentage> {
Length(LengthOrPercentage),
#[animation(error)]
ClosestSide,
#[animation(error)]
FarthestSide,
}
/// A generic type for representing the `polygon()` function
///
/// <https://drafts.csswg.org/css-shapes/#funcdef-polygon>
#[css(comma, function)]
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct Polygon<LengthOrPercentage> {
/// The filling rule for a polygon.
#[css(skip_if = "fill_is_default")]
pub fill: FillRule,
/// A collection of (x, y) coordinates to draw the polygon.
#[css(iterable)]
pub coordinates: Vec<PolygonCoord<LengthOrPercentage>>,
}
/// Coordinates for Polygon.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct PolygonCoord<LengthOrPercentage>(pub LengthOrPercentage, pub LengthOrPercentage);
// https://drafts.csswg.org/css-shapes/#typedef-fill-rule
// NOTE: Basic shapes spec says that these are the only two values, however
// https://www.w3.org/TR/SVG/painting.html#FillRuleProperty
// says that it can also be `inherit`
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
#[repr(u8)]
pub enum FillRule {
Nonzero,
Evenodd,
}
/// The path function defined in css-shapes-2.
///
/// https://drafts.csswg.org/css-shapes-2/#funcdef-path
#[css(comma)]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub struct
|
{
/// The filling rule for the svg path.
#[css(skip_if = "fill_is_default")]
#[animation(constant)]
pub fill: FillRule,
/// The svg path data.
pub path: SVGPathData,
}
// FIXME(nox): Implement ComputeSquaredDistance for T types and stop
// using PartialEq here, this will let us derive this impl.
impl<B, T, U> ComputeSquaredDistance for ShapeSource<B, T, U>
where
B: ComputeSquaredDistance,
T: PartialEq,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
match (self, other) {
(
&ShapeSource::Shape(ref this, ref this_box),
&ShapeSource::Shape(ref other, ref other_box),
)
if this_box == other_box =>
{
this.compute_squared_distance(other)
},
(&ShapeSource::Path(ref this), &ShapeSource::Path(ref other))
if this.fill == other.fill =>
{
this.path.compute_squared_distance(&other.path)
},
_ => Err(()),
}
}
}
impl<B, T, U> ToAnimatedZero for ShapeSource<B, T, U> {
fn to_animated_zero(&self) -> Result<Self, ()> {
Err(())
}
}
impl<L> ToCss for InsetRect<L>
where
L: ToCss + PartialEq,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str("inset(")?;
self.rect.to_css(dest)?;
if let Some(ref radius) = self.round {
dest.write_str(" round ")?;
radius.to_css(dest)?;
}
dest.write_str(")")
}
}
impl<L> Default for ShapeRadius<L> {
#[inline]
fn default() -> Self {
ShapeRadius::ClosestSide
}
}
impl<L> Animate for Polygon<L>
where
L: Animate,
{
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
let coordinates = self
.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
Ok(PolygonCoord(
this.0.animate(&other.0, procedure)?,
this.1.animate(&other.1, procedure)?,
))
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Polygon {
fill: self.fill,
coordinates,
})
}
}
impl<L> ComputeSquaredDistance for Polygon<L>
where
L: ComputeSquaredDistance,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
self.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
let d1 = this.0.compute_squared_distance(&other.0)?;
let d2 = this.1.compute_squared_distance(&other.1)?;
Ok(d1 + d2)
})
.sum()
}
}
impl Default for FillRule {
#[inline]
fn default() -> Self {
FillRule::Nonzero
}
}
#[inline]
fn fill_is_default(fill: &FillRule) -> bool {
*fill == FillRule::default()
}
|
Path
|
identifier_name
|
basic_shape.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! CSS handling for the [`basic-shape`](https://drafts.csswg.org/css-shapes/#typedef-basic-shape)
//! types that are generic over their `ToCss` implementations.
use crate::values::animated::{Animate, Procedure, ToAnimatedZero};
use crate::values::distance::{ComputeSquaredDistance, SquaredDistance};
use crate::values::generics::border::BorderRadius;
use crate::values::generics::position::Position;
use crate::values::generics::rect::Rect;
use crate::values::specified::SVGPathData;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
/// A clipping shape, for `clip-path`.
pub type ClippingShape<BasicShape, Url> = ShapeSource<BasicShape, GeometryBox, Url>;
/// <https://drafts.fxtf.org/css-masking-1/#typedef-geometry-box>
#[allow(missing_docs)]
#[derive(
Animate, Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum GeometryBox {
FillBox,
StrokeBox,
ViewBox,
ShapeBox(ShapeBox),
}
/// A float area shape, for `shape-outside`.
pub type FloatAreaShape<BasicShape, Image> = ShapeSource<BasicShape, ShapeBox, Image>;
/// https://drafts.csswg.org/css-shapes-1/#typedef-shape-box
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Animate,
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeBox {
MarginBox,
BorderBox,
PaddingBox,
ContentBox,
}
/// A shape source, for some reference box.
#[allow(missing_docs)]
#[animation(no_bound(ImageOrUrl))]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub enum ShapeSource<BasicShape, ReferenceBox, ImageOrUrl> {
#[animation(error)]
ImageOrUrl(ImageOrUrl),
Shape(BasicShape, Option<ReferenceBox>),
#[animation(error)]
Box(ReferenceBox),
#[css(function)]
Path(Path),
#[animation(error)]
None,
}
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum BasicShape<H, V, LengthOrPercentage> {
Inset(#[css(field_bound)] InsetRect<LengthOrPercentage>),
Circle(#[css(field_bound)] Circle<H, V, LengthOrPercentage>),
Ellipse(#[css(field_bound)] Ellipse<H, V, LengthOrPercentage>),
Polygon(Polygon<LengthOrPercentage>),
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-inset>
#[allow(missing_docs)]
#[css(function = "inset")]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct InsetRect<LengthOrPercentage> {
pub rect: Rect<LengthOrPercentage>,
pub round: Option<BorderRadius<LengthOrPercentage>>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-circle>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Circle<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub radius: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#funcdef-ellipse>
#[allow(missing_docs)]
#[css(function)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
)]
pub struct Ellipse<H, V, LengthOrPercentage> {
pub position: Position<H, V>,
pub semiaxis_x: ShapeRadius<LengthOrPercentage>,
pub semiaxis_y: ShapeRadius<LengthOrPercentage>,
}
/// <https://drafts.csswg.org/css-shapes/#typedef-shape-radius>
#[allow(missing_docs)]
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
pub enum ShapeRadius<LengthOrPercentage> {
Length(LengthOrPercentage),
#[animation(error)]
ClosestSide,
#[animation(error)]
FarthestSide,
}
/// A generic type for representing the `polygon()` function
///
/// <https://drafts.csswg.org/css-shapes/#funcdef-polygon>
#[css(comma, function)]
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct Polygon<LengthOrPercentage> {
/// The filling rule for a polygon.
#[css(skip_if = "fill_is_default")]
pub fill: FillRule,
/// A collection of (x, y) coordinates to draw the polygon.
#[css(iterable)]
pub coordinates: Vec<PolygonCoord<LengthOrPercentage>>,
}
/// Coordinates for Polygon.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct PolygonCoord<LengthOrPercentage>(pub LengthOrPercentage, pub LengthOrPercentage);
// https://drafts.csswg.org/css-shapes/#typedef-fill-rule
// NOTE: Basic shapes spec says that these are the only two values, however
// https://www.w3.org/TR/SVG/painting.html#FillRuleProperty
// says that it can also be `inherit`
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(
Clone,
Copy,
Debug,
Eq,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
)]
#[repr(u8)]
pub enum FillRule {
Nonzero,
Evenodd,
}
/// The path function defined in css-shapes-2.
///
/// https://drafts.csswg.org/css-shapes-2/#funcdef-path
#[css(comma)]
#[derive(
Animate, Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss,
)]
pub struct Path {
/// The filling rule for the svg path.
#[css(skip_if = "fill_is_default")]
#[animation(constant)]
pub fill: FillRule,
/// The svg path data.
pub path: SVGPathData,
}
// FIXME(nox): Implement ComputeSquaredDistance for T types and stop
// using PartialEq here, this will let us derive this impl.
impl<B, T, U> ComputeSquaredDistance for ShapeSource<B, T, U>
where
B: ComputeSquaredDistance,
T: PartialEq,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
match (self, other) {
(
&ShapeSource::Shape(ref this, ref this_box),
&ShapeSource::Shape(ref other, ref other_box),
)
if this_box == other_box =>
|
,
(&ShapeSource::Path(ref this), &ShapeSource::Path(ref other))
if this.fill == other.fill =>
{
this.path.compute_squared_distance(&other.path)
},
_ => Err(()),
}
}
}
impl<B, T, U> ToAnimatedZero for ShapeSource<B, T, U> {
fn to_animated_zero(&self) -> Result<Self, ()> {
Err(())
}
}
impl<L> ToCss for InsetRect<L>
where
L: ToCss + PartialEq,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str("inset(")?;
self.rect.to_css(dest)?;
if let Some(ref radius) = self.round {
dest.write_str(" round ")?;
radius.to_css(dest)?;
}
dest.write_str(")")
}
}
impl<L> Default for ShapeRadius<L> {
#[inline]
fn default() -> Self {
ShapeRadius::ClosestSide
}
}
impl<L> Animate for Polygon<L>
where
L: Animate,
{
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
let coordinates = self
.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
Ok(PolygonCoord(
this.0.animate(&other.0, procedure)?,
this.1.animate(&other.1, procedure)?,
))
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Polygon {
fill: self.fill,
coordinates,
})
}
}
impl<L> ComputeSquaredDistance for Polygon<L>
where
L: ComputeSquaredDistance,
{
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
        if self.fill != other.fill {
return Err(());
}
        if self.coordinates.len() != other.coordinates.len() {
return Err(());
}
self.coordinates
.iter()
.zip(other.coordinates.iter())
.map(|(this, other)| {
let d1 = this.0.compute_squared_distance(&other.0)?;
let d2 = this.1.compute_squared_distance(&other.1)?;
Ok(d1 + d2)
})
.sum()
}
}
impl Default for FillRule {
#[inline]
fn default() -> Self {
FillRule::Nonzero
}
}
#[inline]
fn fill_is_default(fill: &FillRule) -> bool {
*fill == FillRule::default()
}
|
{
this.compute_squared_distance(other)
}
|
conditional_block
|
oracle.rs
|
use aes;
use rand;
pub struct Oracle {
key: [u8; 16],
iv: [u8; 16],
}
impl Oracle {
pub fn new() -> Oracle {
Oracle {
key: random_block(),
iv: random_block()
}
}
#[cfg(test)]
pub fn controlled(key : &[u8; 16], iv: &[u8; 16]) -> Oracle {
Oracle { key: key.clone(), iv: iv.clone() }
}
pub fn encrypt(&self, message: &str) -> Vec<u8> {
let prefix = "comment1=cooking%20MCs;userdata=";
let message = message.replace("=", "%3D").replace(";", "%3B");
let suffix = ";comment2=%20like%20a%20pound%20of%20bacon";
let text: Vec<u8> =
prefix.bytes().chain(message.bytes()).chain(suffix.bytes()).collect();
aes::aes_cbc_encrypt(&text[..], &self.key, &self.iv)
}
pub fn is_admin(&self, ciphertext: &[u8]) -> bool {
        if let Ok(decoded) = aes::aes_cbc_decrypt(&ciphertext, &self.key, &self.iv) {
contains(&decoded, "user=admin".as_bytes())
}
else {
false
}
}
}
fn random_block() -> [u8; 16] {
use rand::Rng;
let mut rng = rand::thread_rng();
let mut block = [0u8; 16];
for el in block.iter_mut() {
*el = rng.gen::<u8>();
}
block
}
fn contains(container: &[u8], containee: &[u8]) -> bool {
for idx in 0..(container.len()-containee.len()) {
if container[idx..].starts_with(containee) {
return true;
}
}
false
}
#[cfg(test)]
mod tests {
use super::Oracle;
#[test]
fn empty_encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0xd1, 0x3d, 0x92, 0x2b,
0x40, 0x8a, 0x39, 0xd0, 0x34, 0xf9, 0x9e, 0x5b, 0x18,
0x3a, 0xbe, 0x51, 0x64, 0x6f, 0x21, 0x90, 0xc1, 0x64,
0x6b, 0xbe, 0x8a, 0x16, 0x2b, 0x41, 0x1c, 0x35, 0x02,
0x74, 0xd8, 0xcc, 0xaf, 0xd9, 0x57, 0xec, 0xd6, 0x46,
0x0c, 0x5d, 0x6f, 0xed, 0x04, 0x07, 0x40, 0x2b];
assert_eq!(output, expected);
}
#[test]
fn encrypt()
|
use super::contains;
#[test]
fn contains_pass() {
let container = vec![0,1,2,3,4];
let containee = vec![2,3];
assert_eq!(contains(&container[..], &containee[..]), true);
}
#[test]
fn contains_fail() {
let container = vec![0,1,2,3,4];
let containee = vec![3,4,5];
assert_eq!(contains(&container[..], &containee[..]), false);
}
}
|
{
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("hello world");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0x76, 0xa0, 0x56, 0x48,
0x12, 0xd4, 0x44, 0xd1, 0x20, 0x21, 0xdd, 0xbe, 0x10,
0x3b, 0xd6, 0x78, 0x97, 0x1e, 0xb8, 0x8c, 0xcc, 0xba,
0x59, 0x89, 0xb0, 0xa1, 0x42, 0x32, 0x8f, 0xf0, 0x5b,
0x23, 0x0d, 0x77, 0x21, 0x8e, 0xcb, 0xdf, 0x14, 0x93,
0x73, 0xb5, 0x3a, 0xce, 0xa6, 0x8b, 0xf5, 0x30, 0xe5,
0x6b, 0xcb, 0x14, 0x8b, 0xbc, 0xa3, 0xc0, 0xbe, 0xe3,
0x40, 0x89, 0xc6, 0xb3, 0x9b, 0xfb];
assert_eq!(output, expected);
}
|
identifier_body
|
oracle.rs
|
use aes;
use rand;
pub struct Oracle {
key: [u8; 16],
iv: [u8; 16],
}
impl Oracle {
pub fn new() -> Oracle {
Oracle {
key: random_block(),
iv: random_block()
}
}
#[cfg(test)]
pub fn controlled(key : &[u8; 16], iv: &[u8; 16]) -> Oracle {
Oracle { key: key.clone(), iv: iv.clone() }
}
pub fn encrypt(&self, message: &str) -> Vec<u8> {
let prefix = "comment1=cooking%20MCs;userdata=";
let message = message.replace("=", "%3D").replace(";", "%3B");
let suffix = ";comment2=%20like%20a%20pound%20of%20bacon";
let text: Vec<u8> =
prefix.bytes().chain(message.bytes()).chain(suffix.bytes()).collect();
aes::aes_cbc_encrypt(&text[..], &self.key, &self.iv)
}
pub fn is_admin(&self, ciphertext: &[u8]) -> bool {
        if let Ok(decoded) = aes::aes_cbc_decrypt(&ciphertext, &self.key, &self.iv) {
contains(&decoded, "user=admin".as_bytes())
}
else {
false
}
}
}
fn random_block() -> [u8; 16] {
use rand::Rng;
let mut rng = rand::thread_rng();
let mut block = [0u8; 16];
for el in block.iter_mut() {
*el = rng.gen::<u8>();
}
block
}
fn contains(container: &[u8], containee: &[u8]) -> bool {
for idx in 0..(container.len()-containee.len()) {
if container[idx..].starts_with(containee) {
return true;
}
}
false
}
#[cfg(test)]
mod tests {
use super::Oracle;
#[test]
fn empty_encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
|
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0xd1, 0x3d, 0x92, 0x2b,
0x40, 0x8a, 0x39, 0xd0, 0x34, 0xf9, 0x9e, 0x5b, 0x18,
0x3a, 0xbe, 0x51, 0x64, 0x6f, 0x21, 0x90, 0xc1, 0x64,
0x6b, 0xbe, 0x8a, 0x16, 0x2b, 0x41, 0x1c, 0x35, 0x02,
0x74, 0xd8, 0xcc, 0xaf, 0xd9, 0x57, 0xec, 0xd6, 0x46,
0x0c, 0x5d, 0x6f, 0xed, 0x04, 0x07, 0x40, 0x2b];
assert_eq!(output, expected);
}
#[test]
fn encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("hello world");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0x76, 0xa0, 0x56, 0x48,
0x12, 0xd4, 0x44, 0xd1, 0x20, 0x21, 0xdd, 0xbe, 0x10,
0x3b, 0xd6, 0x78, 0x97, 0x1e, 0xb8, 0x8c, 0xcc, 0xba,
0x59, 0x89, 0xb0, 0xa1, 0x42, 0x32, 0x8f, 0xf0, 0x5b,
0x23, 0x0d, 0x77, 0x21, 0x8e, 0xcb, 0xdf, 0x14, 0x93,
0x73, 0xb5, 0x3a, 0xce, 0xa6, 0x8b, 0xf5, 0x30, 0xe5,
0x6b, 0xcb, 0x14, 0x8b, 0xbc, 0xa3, 0xc0, 0xbe, 0xe3,
0x40, 0x89, 0xc6, 0xb3, 0x9b, 0xfb];
assert_eq!(output, expected);
}
use super::contains;
#[test]
fn contains_pass() {
let container = vec![0,1,2,3,4];
let containee = vec![2,3];
assert_eq!(contains(&container[..], &containee[..]), true);
}
#[test]
fn contains_fail() {
let container = vec![0,1,2,3,4];
let containee = vec![3,4,5];
assert_eq!(contains(&container[..], &containee[..]), false);
}
}
|
let output = oracle.encrypt("");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
|
random_line_split
|
oracle.rs
|
use aes;
use rand;
pub struct Oracle {
key: [u8; 16],
iv: [u8; 16],
}
impl Oracle {
pub fn new() -> Oracle {
Oracle {
key: random_block(),
iv: random_block()
}
}
#[cfg(test)]
pub fn controlled(key : &[u8; 16], iv: &[u8; 16]) -> Oracle {
Oracle { key: key.clone(), iv: iv.clone() }
}
pub fn encrypt(&self, message: &str) -> Vec<u8> {
let prefix = "comment1=cooking%20MCs;userdata=";
let message = message.replace("=", "%3D").replace(";", "%3B");
let suffix = ";comment2=%20like%20a%20pound%20of%20bacon";
let text: Vec<u8> =
prefix.bytes().chain(message.bytes()).chain(suffix.bytes()).collect();
aes::aes_cbc_encrypt(&text[..], &self.key, &self.iv)
}
pub fn is_admin(&self, ciphertext: &[u8]) -> bool {
        if let Ok(decoded) = aes::aes_cbc_decrypt(&ciphertext, &self.key, &self.iv) {
contains(&decoded, "user=admin".as_bytes())
}
else {
false
}
}
}
fn
|
() -> [u8; 16] {
use rand::Rng;
let mut rng = rand::thread_rng();
let mut block = [0u8; 16];
for el in block.iter_mut() {
*el = rng.gen::<u8>();
}
block
}
fn contains(container: &[u8], containee: &[u8]) -> bool {
for idx in 0..(container.len()-containee.len()) {
if container[idx..].starts_with(containee) {
return true;
}
}
false
}
#[cfg(test)]
mod tests {
use super::Oracle;
#[test]
fn empty_encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0xd1, 0x3d, 0x92, 0x2b,
0x40, 0x8a, 0x39, 0xd0, 0x34, 0xf9, 0x9e, 0x5b, 0x18,
0x3a, 0xbe, 0x51, 0x64, 0x6f, 0x21, 0x90, 0xc1, 0x64,
0x6b, 0xbe, 0x8a, 0x16, 0x2b, 0x41, 0x1c, 0x35, 0x02,
0x74, 0xd8, 0xcc, 0xaf, 0xd9, 0x57, 0xec, 0xd6, 0x46,
0x0c, 0x5d, 0x6f, 0xed, 0x04, 0x07, 0x40, 0x2b];
assert_eq!(output, expected);
}
#[test]
fn encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("hello world");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0x76, 0xa0, 0x56, 0x48,
0x12, 0xd4, 0x44, 0xd1, 0x20, 0x21, 0xdd, 0xbe, 0x10,
0x3b, 0xd6, 0x78, 0x97, 0x1e, 0xb8, 0x8c, 0xcc, 0xba,
0x59, 0x89, 0xb0, 0xa1, 0x42, 0x32, 0x8f, 0xf0, 0x5b,
0x23, 0x0d, 0x77, 0x21, 0x8e, 0xcb, 0xdf, 0x14, 0x93,
0x73, 0xb5, 0x3a, 0xce, 0xa6, 0x8b, 0xf5, 0x30, 0xe5,
0x6b, 0xcb, 0x14, 0x8b, 0xbc, 0xa3, 0xc0, 0xbe, 0xe3,
0x40, 0x89, 0xc6, 0xb3, 0x9b, 0xfb];
assert_eq!(output, expected);
}
use super::contains;
#[test]
fn contains_pass() {
let container = vec![0,1,2,3,4];
let containee = vec![2,3];
assert_eq!(contains(&container[..], &containee[..]), true);
}
#[test]
fn contains_fail() {
let container = vec![0,1,2,3,4];
let containee = vec![3,4,5];
assert_eq!(contains(&container[..], &containee[..]), false);
}
}
|
random_block
|
identifier_name
|
oracle.rs
|
use aes;
use rand;
pub struct Oracle {
key: [u8; 16],
iv: [u8; 16],
}
impl Oracle {
pub fn new() -> Oracle {
Oracle {
key: random_block(),
iv: random_block()
}
}
#[cfg(test)]
pub fn controlled(key : &[u8; 16], iv: &[u8; 16]) -> Oracle {
Oracle { key: key.clone(), iv: iv.clone() }
}
pub fn encrypt(&self, message: &str) -> Vec<u8> {
let prefix = "comment1=cooking%20MCs;userdata=";
let message = message.replace("=", "%3D").replace(";", "%3B");
let suffix = ";comment2=%20like%20a%20pound%20of%20bacon";
let text: Vec<u8> =
prefix.bytes().chain(message.bytes()).chain(suffix.bytes()).collect();
aes::aes_cbc_encrypt(&text[..], &self.key, &self.iv)
}
pub fn is_admin(&self, ciphertext: &[u8]) -> bool {
if let Ok(decoded) = aes::aes_cbc_decrypt(&ciphertext, &self.key, &self.iv)
|
else {
false
}
}
}
fn random_block() -> [u8; 16] {
use rand::Rng;
let mut rng = rand::thread_rng();
let mut block = [0u8; 16];
for el in block.iter_mut() {
*el = rng.gen::<u8>();
}
block
}
fn contains(container: &[u8], containee: &[u8]) -> bool {
for idx in 0..(container.len()-containee.len()) {
if container[idx..].starts_with(containee) {
return true;
}
}
false
}
#[cfg(test)]
mod tests {
use super::Oracle;
#[test]
fn empty_encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0xd1, 0x3d, 0x92, 0x2b,
0x40, 0x8a, 0x39, 0xd0, 0x34, 0xf9, 0x9e, 0x5b, 0x18,
0x3a, 0xbe, 0x51, 0x64, 0x6f, 0x21, 0x90, 0xc1, 0x64,
0x6b, 0xbe, 0x8a, 0x16, 0x2b, 0x41, 0x1c, 0x35, 0x02,
0x74, 0xd8, 0xcc, 0xaf, 0xd9, 0x57, 0xec, 0xd6, 0x46,
0x0c, 0x5d, 0x6f, 0xed, 0x04, 0x07, 0x40, 0x2b];
assert_eq!(output, expected);
}
#[test]
fn encrypt() {
let key = [0x79, 0x65, 0x6c, 0x6c, 0x6f, 0x77, 0x20, 0x73, 0x75, 0x62,
0x6d, 0x61, 0x72, 0x69, 0x6e, 0x65]; //"yellow submarine"
let iv = [0x74, 0x68, 0x65, 0x20, 0x31, 0x73, 0x74, 0x20, 0x31, 0x36,
0x20, 0x62, 0x79, 0x74, 0x65, 0x73]; //"the 1st 16 bytes"
let oracle = Oracle::controlled(&key, &iv);
let output = oracle.encrypt("hello world");
let expected = vec![0x5f, 0x42, 0xc3, 0xdd, 0x32, 0xfe, 0x04, 0x86, 0x21,
0xc0, 0xea, 0xc1, 0x96, 0xbd, 0x01, 0xe4, 0x79, 0xdb,
0x1c, 0x4d, 0xd9, 0x78, 0x9a, 0x41, 0xac, 0xfd, 0x0a,
0xeb, 0xac, 0x3b, 0x47, 0x7a, 0x76, 0xa0, 0x56, 0x48,
0x12, 0xd4, 0x44, 0xd1, 0x20, 0x21, 0xdd, 0xbe, 0x10,
0x3b, 0xd6, 0x78, 0x97, 0x1e, 0xb8, 0x8c, 0xcc, 0xba,
0x59, 0x89, 0xb0, 0xa1, 0x42, 0x32, 0x8f, 0xf0, 0x5b,
0x23, 0x0d, 0x77, 0x21, 0x8e, 0xcb, 0xdf, 0x14, 0x93,
0x73, 0xb5, 0x3a, 0xce, 0xa6, 0x8b, 0xf5, 0x30, 0xe5,
0x6b, 0xcb, 0x14, 0x8b, 0xbc, 0xa3, 0xc0, 0xbe, 0xe3,
0x40, 0x89, 0xc6, 0xb3, 0x9b, 0xfb];
assert_eq!(output, expected);
}
use super::contains;
#[test]
fn contains_pass() {
let container = vec![0,1,2,3,4];
let containee = vec![2,3];
assert_eq!(contains(&container[..], &containee[..]), true);
}
#[test]
fn contains_fail() {
let container = vec![0,1,2,3,4];
let containee = vec![3,4,5];
assert_eq!(contains(&container[..], &containee[..]), false);
}
}
|
{
contains(&decoded, "user=admin".as_bytes())
}
|
conditional_block
|
rule_list.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A list of CSS rules.
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
use servo_arc::{Arc, RawOffsetArc};
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use stylesheets::{CssRule, RulesMutateError};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::State;
use stylesheets::stylesheet::StylesheetContents;
/// A list of CSS rules.
#[derive(Debug)]
pub struct CssRules(pub Vec<CssRule>);
impl CssRules {
    /// Whether this list of CSS rules is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl DeepCloneWithLock for CssRules {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
CssRules(self.0.iter().map(|x| {
x.deep_clone_with_lock(lock, guard, params)
}).collect())
}
}
impl CssRules {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.0.shallow_size_of(ops);
for rule in self.0.iter() {
n += rule.size_of(guard, ops);
}
n
}
/// Trivially construct a new set of CSS rules.
pub fn new(rules: Vec<CssRule>, shared_lock: &SharedRwLock) -> Arc<Locked<CssRules>> {
Arc::new(shared_lock.wrap(CssRules(rules)))
}
/// Returns whether all the rules in this list are namespace or import
/// rules.
fn only_ns_or_import(&self) -> bool {
self.0.iter().all(|r| {
match *r {
CssRule::Namespace(..) |
CssRule::Import(..) => true,
_ => false
}
})
}
/// <https://drafts.csswg.org/cssom/#remove-a-css-rule>
pub fn remove_rule(&mut self, index: usize) -> Result<(), RulesMutateError> {
// Step 1, 2
if index >= self.0.len() {
return Err(RulesMutateError::IndexSize);
}
{
// Step 3
let ref rule = self.0[index];
// Step 4
if let CssRule::Namespace(..) = *rule {
                if !self.only_ns_or_import()
|
}
}
// Step 5, 6
self.0.remove(index);
Ok(())
}
}
/// A trait to implement helpers for `Arc<Locked<CssRules>>`.
pub trait CssRulesHelpers {
/// <https://drafts.csswg.org/cssom/#insert-a-css-rule>
///
/// Written in this funky way because parsing an @import rule may cause us
/// to clone a stylesheet from the same document due to caching in the CSS
/// loader.
///
/// TODO(emilio): We could also pass the write guard down into the loader
/// instead, but that seems overkill.
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError>;
}
impl CssRulesHelpers for RawOffsetArc<Locked<CssRules>> {
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError> {
let state = {
let read_guard = lock.read();
let rules = self.read_with(&read_guard);
// Step 1, 2
if index > rules.0.len() {
return Err(RulesMutateError::IndexSize);
}
// Computes the parser state at the given index
if nested {
None
} else if index == 0 {
Some(State::Start)
} else {
rules.0.get(index - 1).map(CssRule::rule_state)
}
};
// Step 3, 4
// XXXManishearth should we also store the namespace map?
let (new_rule, new_state) =
CssRule::parse(
&rule,
parent_stylesheet_contents,
lock,
state,
loader
)?;
{
let mut write_guard = lock.write();
let rules = self.write_with(&mut write_guard);
// Step 5
// Computes the maximum allowed parser state at a given index.
let rev_state = rules.0.get(index).map_or(State::Body, CssRule::rule_state);
if new_state > rev_state {
// We inserted a rule too early, e.g. inserting
// a regular style rule before @namespace rules
return Err(RulesMutateError::HierarchyRequest);
}
// Step 6
if let CssRule::Namespace(..) = new_rule {
                if !rules.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
rules.0.insert(index, new_rule.clone());
}
Ok(new_rule)
}
}
|
{
return Err(RulesMutateError::InvalidState);
}
|
conditional_block
|
rule_list.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A list of CSS rules.
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
use servo_arc::{Arc, RawOffsetArc};
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use stylesheets::{CssRule, RulesMutateError};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::State;
use stylesheets::stylesheet::StylesheetContents;
/// A list of CSS rules.
#[derive(Debug)]
pub struct CssRules(pub Vec<CssRule>);
impl CssRules {
    /// Whether this list of CSS rules is empty.
pub fn
|
(&self) -> bool {
self.0.is_empty()
}
}
impl DeepCloneWithLock for CssRules {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
CssRules(self.0.iter().map(|x| {
x.deep_clone_with_lock(lock, guard, params)
}).collect())
}
}
impl CssRules {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.0.shallow_size_of(ops);
for rule in self.0.iter() {
n += rule.size_of(guard, ops);
}
n
}
/// Trivially construct a new set of CSS rules.
pub fn new(rules: Vec<CssRule>, shared_lock: &SharedRwLock) -> Arc<Locked<CssRules>> {
Arc::new(shared_lock.wrap(CssRules(rules)))
}
/// Returns whether all the rules in this list are namespace or import
/// rules.
fn only_ns_or_import(&self) -> bool {
self.0.iter().all(|r| {
match *r {
CssRule::Namespace(..) |
CssRule::Import(..) => true,
_ => false
}
})
}
/// <https://drafts.csswg.org/cssom/#remove-a-css-rule>
pub fn remove_rule(&mut self, index: usize) -> Result<(), RulesMutateError> {
// Step 1, 2
if index >= self.0.len() {
return Err(RulesMutateError::IndexSize);
}
{
// Step 3
let ref rule = self.0[index];
// Step 4
if let CssRule::Namespace(..) = *rule {
                if !self.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
}
// Step 5, 6
self.0.remove(index);
Ok(())
}
}
/// A trait to implement helpers for `Arc<Locked<CssRules>>`.
pub trait CssRulesHelpers {
/// <https://drafts.csswg.org/cssom/#insert-a-css-rule>
///
/// Written in this funky way because parsing an @import rule may cause us
/// to clone a stylesheet from the same document due to caching in the CSS
/// loader.
///
/// TODO(emilio): We could also pass the write guard down into the loader
/// instead, but that seems overkill.
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError>;
}
impl CssRulesHelpers for RawOffsetArc<Locked<CssRules>> {
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError> {
let state = {
let read_guard = lock.read();
let rules = self.read_with(&read_guard);
// Step 1, 2
if index > rules.0.len() {
return Err(RulesMutateError::IndexSize);
}
// Computes the parser state at the given index
if nested {
None
} else if index == 0 {
Some(State::Start)
} else {
rules.0.get(index - 1).map(CssRule::rule_state)
}
};
// Step 3, 4
// XXXManishearth should we also store the namespace map?
let (new_rule, new_state) =
CssRule::parse(
&rule,
parent_stylesheet_contents,
lock,
state,
loader
)?;
{
let mut write_guard = lock.write();
let rules = self.write_with(&mut write_guard);
// Step 5
// Computes the maximum allowed parser state at a given index.
let rev_state = rules.0.get(index).map_or(State::Body, CssRule::rule_state);
if new_state > rev_state {
// We inserted a rule too early, e.g. inserting
// a regular style rule before @namespace rules
return Err(RulesMutateError::HierarchyRequest);
}
// Step 6
if let CssRule::Namespace(..) = new_rule {
                if !rules.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
rules.0.insert(index, new_rule.clone());
}
Ok(new_rule)
}
}
|
is_empty
|
identifier_name
|
rule_list.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A list of CSS rules.
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
use servo_arc::{Arc, RawOffsetArc};
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use stylesheets::{CssRule, RulesMutateError};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::State;
use stylesheets::stylesheet::StylesheetContents;
/// A list of CSS rules.
#[derive(Debug)]
pub struct CssRules(pub Vec<CssRule>);
impl CssRules {
    /// Whether this list of CSS rules is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl DeepCloneWithLock for CssRules {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
CssRules(self.0.iter().map(|x| {
x.deep_clone_with_lock(lock, guard, params)
}).collect())
}
}
impl CssRules {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.0.shallow_size_of(ops);
for rule in self.0.iter() {
n += rule.size_of(guard, ops);
}
n
}
/// Trivially construct a new set of CSS rules.
pub fn new(rules: Vec<CssRule>, shared_lock: &SharedRwLock) -> Arc<Locked<CssRules>> {
Arc::new(shared_lock.wrap(CssRules(rules)))
}
/// Returns whether all the rules in this list are namespace or import
/// rules.
fn only_ns_or_import(&self) -> bool {
self.0.iter().all(|r| {
match *r {
CssRule::Namespace(..) |
CssRule::Import(..) => true,
_ => false
}
})
}
/// <https://drafts.csswg.org/cssom/#remove-a-css-rule>
pub fn remove_rule(&mut self, index: usize) -> Result<(), RulesMutateError> {
// Step 1, 2
if index >= self.0.len() {
return Err(RulesMutateError::IndexSize);
}
{
// Step 3
let ref rule = self.0[index];
// Step 4
if let CssRule::Namespace(..) = *rule {
                if !self.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
}
// Step 5, 6
self.0.remove(index);
Ok(())
}
}
/// A trait to implement helpers for `Arc<Locked<CssRules>>`.
pub trait CssRulesHelpers {
/// <https://drafts.csswg.org/cssom/#insert-a-css-rule>
///
/// Written in this funky way because parsing an @import rule may cause us
/// to clone a stylesheet from the same document due to caching in the CSS
/// loader.
///
/// TODO(emilio): We could also pass the write guard down into the loader
/// instead, but that seems overkill.
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError>;
}
impl CssRulesHelpers for RawOffsetArc<Locked<CssRules>> {
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError>
|
// Step 3, 4
// XXXManishearth should we also store the namespace map?
let (new_rule, new_state) =
CssRule::parse(
&rule,
parent_stylesheet_contents,
lock,
state,
loader
)?;
{
let mut write_guard = lock.write();
let rules = self.write_with(&mut write_guard);
// Step 5
// Computes the maximum allowed parser state at a given index.
let rev_state = rules.0.get(index).map_or(State::Body, CssRule::rule_state);
if new_state > rev_state {
// We inserted a rule too early, e.g. inserting
// a regular style rule before @namespace rules
return Err(RulesMutateError::HierarchyRequest);
}
// Step 6
if let CssRule::Namespace(..) = new_rule {
                if !rules.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
rules.0.insert(index, new_rule.clone());
}
Ok(new_rule)
}
}
|
{
let state = {
let read_guard = lock.read();
let rules = self.read_with(&read_guard);
// Step 1, 2
if index > rules.0.len() {
return Err(RulesMutateError::IndexSize);
}
// Computes the parser state at the given index
if nested {
None
} else if index == 0 {
Some(State::Start)
} else {
rules.0.get(index - 1).map(CssRule::rule_state)
}
};
|
identifier_body
|
rule_list.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A list of CSS rules.
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOfOps};
use servo_arc::{Arc, RawOffsetArc};
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use stylesheets::{CssRule, RulesMutateError};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::State;
use stylesheets::stylesheet::StylesheetContents;
/// A list of CSS rules.
#[derive(Debug)]
pub struct CssRules(pub Vec<CssRule>);
impl CssRules {
    /// Whether this list of CSS rules is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl DeepCloneWithLock for CssRules {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
CssRules(self.0.iter().map(|x| {
x.deep_clone_with_lock(lock, guard, params)
}).collect())
}
}
|
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
let mut n = self.0.shallow_size_of(ops);
for rule in self.0.iter() {
n += rule.size_of(guard, ops);
}
n
}
/// Trivially construct a new set of CSS rules.
pub fn new(rules: Vec<CssRule>, shared_lock: &SharedRwLock) -> Arc<Locked<CssRules>> {
Arc::new(shared_lock.wrap(CssRules(rules)))
}
/// Returns whether all the rules in this list are namespace or import
/// rules.
fn only_ns_or_import(&self) -> bool {
self.0.iter().all(|r| {
match *r {
CssRule::Namespace(..) |
CssRule::Import(..) => true,
_ => false
}
})
}
/// <https://drafts.csswg.org/cssom/#remove-a-css-rule>
pub fn remove_rule(&mut self, index: usize) -> Result<(), RulesMutateError> {
// Step 1, 2
if index >= self.0.len() {
return Err(RulesMutateError::IndexSize);
}
{
// Step 3
let ref rule = self.0[index];
// Step 4
if let CssRule::Namespace(..) = *rule {
                if !self.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
}
// Step 5, 6
self.0.remove(index);
Ok(())
}
}
/// A trait to implement helpers for `Arc<Locked<CssRules>>`.
pub trait CssRulesHelpers {
/// <https://drafts.csswg.org/cssom/#insert-a-css-rule>
///
/// Written in this funky way because parsing an @import rule may cause us
/// to clone a stylesheet from the same document due to caching in the CSS
/// loader.
///
/// TODO(emilio): We could also pass the write guard down into the loader
/// instead, but that seems overkill.
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError>;
}
impl CssRulesHelpers for RawOffsetArc<Locked<CssRules>> {
fn insert_rule(&self,
lock: &SharedRwLock,
rule: &str,
parent_stylesheet_contents: &StylesheetContents,
index: usize,
nested: bool,
loader: Option<&StylesheetLoader>)
-> Result<CssRule, RulesMutateError> {
let state = {
let read_guard = lock.read();
let rules = self.read_with(&read_guard);
// Step 1, 2
if index > rules.0.len() {
return Err(RulesMutateError::IndexSize);
}
// Computes the parser state at the given index
if nested {
None
} else if index == 0 {
Some(State::Start)
} else {
rules.0.get(index - 1).map(CssRule::rule_state)
}
};
// Step 3, 4
// XXXManishearth should we also store the namespace map?
let (new_rule, new_state) =
CssRule::parse(
&rule,
parent_stylesheet_contents,
lock,
state,
loader
)?;
{
let mut write_guard = lock.write();
let rules = self.write_with(&mut write_guard);
// Step 5
// Computes the maximum allowed parser state at a given index.
let rev_state = rules.0.get(index).map_or(State::Body, CssRule::rule_state);
if new_state > rev_state {
// We inserted a rule too early, e.g. inserting
// a regular style rule before @namespace rules
return Err(RulesMutateError::HierarchyRequest);
}
// Step 6
if let CssRule::Namespace(..) = new_rule {
                if !rules.only_ns_or_import() {
return Err(RulesMutateError::InvalidState);
}
}
rules.0.insert(index, new_rule.clone());
}
Ok(new_rule)
}
}
|
impl CssRules {
/// Measure heap usage.
#[cfg(feature = "gecko")]
|
random_line_split
|
digitalocean.rs
|
use super::command;
use super::chain;
fn get_subdomain_from_name(name: &str) -> &str {
// For subdomains, we only take the first element when split by ".", this allows
// us to create naked domain names or use the same subdomain across domains.
let components: Vec<&str> = name.split(".").collect();
if components.len() > 2 {
// If first throws an error after we've checked length, panic
components.first().unwrap()
} else {
&"@"
}
}
pub fn create_droplet_by_name(name: &str, region: Option<&str>, size: Option<&str>, domain: Option<&str>,
enable_backups: Option<&str>) {
let ssh_key_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let ids : Vec<&str> = res.stdout.lines().collect();
let new_cmd = str::replace(&cmd_str, "%ssh_keys%", &ids.join(","));
new_cmd.to_string()
};
let ip_address_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let mut ip_address : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(name) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
ip_address = Some(fields[1].to_string());
break;
}
}
if ip_address == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%ip_address%", &ip_address.unwrap());
new_cmd.to_string()
};
let subdomain = get_subdomain_from_name(name);
let enable_backups_string: &str;
match enable_backups {
Some("y") => enable_backups_string = "--enable-backups",
Some("Y") => enable_backups_string = "--enable-backups",
Some(_) => enable_backups_string = "",
_ => enable_backups_string = ""
}
let create_str = format!("doctl compute droplet create {} --image=ubuntu-16-04-x64 --region={} --size={} --ssh-keys=\"%ssh_keys%\" {} --wait",
name,
region.unwrap_or("sfo1"),
size.unwrap_or("512mb"),
enable_backups_string);
let record_str = format!("doctl compute domain records create {} --record-type=A --record-data=%ip_address% --record-name={}", domain.unwrap_or("one.haus"), subdomain);
let _ = chain::CommandChain::new()
.cmd("doctl compute ssh-key list --no-header --format=ID")
.result_mapped_cmd(&ssh_key_mapping_func, &create_str)
.cmd("doctl compute droplet list --format Name,PublicIPv4,PublicIPv6,Status")
.result_mapped_cmd(&ip_address_mapping_func, &record_str)
.execute();
}
pub fn destroy_droplet_by_name(name: &str, domain: Option<&str>) {
let subdomain = get_subdomain_from_name(name);
let record_id_extractor = |res: &command::Result, cmd_str: String| -> String {
let mut record_id : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(subdomain) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
record_id = Some(fields[1].to_string());
break;
}
}
if record_id == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%record_id%", &record_id.unwrap());
new_cmd.to_string()
};
let domain_name = domain.unwrap_or("one.haus");
let delete_droplet_cmd = format!("doctl compute droplet delete -f {}", name);
let list_records_cmd = format!("doctl compute domain records list {} --format Name,ID --no-header", domain_name);
let delete_record_cmd = format!("doctl compute domain records delete -f {} %record_id%", domain_name);
// TODO: check the result heh
let _ = chain::CommandChain::new()
.cmd_nonfatal(&delete_droplet_cmd)
.cmd(&list_records_cmd)
.result_mapped_cmd(&record_id_extractor, &delete_record_cmd)
.execute();
}
pub fn
|
(name: &str) {
// By default, always attempt to add a new key with [name] mapping to ~/.ssh/id_rsa.pub
let create_key_str = format!("doctl compute ssh-key create {} --public-key=\"$(cat ~/.ssh/id_rsa.pub)\"", name);
println!("Running command:\n\t\t{}", create_key_str);
// Create the actual droplet
let result = command::run_host_cmd(&create_key_str);
if !result.success {
println!("Failed with stderr:\n\n{}", result.stderr);
}
}
|
create_sshkey
|
identifier_name
|
digitalocean.rs
|
use super::command;
use super::chain;
fn get_subdomain_from_name(name: &str) -> &str {
// For subdomains, we only take the first element when split by ".", this allows
// us to create naked domain names or use the same subdomain across domains.
let components: Vec<&str> = name.split(".").collect();
if components.len() > 2 {
// If first throws an error after we've checked length, panic
components.first().unwrap()
} else
|
}
pub fn create_droplet_by_name(name: &str, region: Option<&str>, size: Option<&str>, domain: Option<&str>,
enable_backups: Option<&str>) {
let ssh_key_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let ids : Vec<&str> = res.stdout.lines().collect();
let new_cmd = str::replace(&cmd_str, "%ssh_keys%", &ids.join(","));
new_cmd.to_string()
};
let ip_address_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let mut ip_address : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(name) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
ip_address = Some(fields[1].to_string());
break;
}
}
if ip_address == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%ip_address%", &ip_address.unwrap());
new_cmd.to_string()
};
let subdomain = get_subdomain_from_name(name);
let enable_backups_string: &str;
match enable_backups {
Some("y") => enable_backups_string = "--enable-backups",
Some("Y") => enable_backups_string = "--enable-backups",
Some(_) => enable_backups_string = "",
_ => enable_backups_string = ""
}
let create_str = format!("doctl compute droplet create {} --image=ubuntu-16-04-x64 --region={} --size={} --ssh-keys=\"%ssh_keys%\" {} --wait",
name,
region.unwrap_or("sfo1"),
size.unwrap_or("512mb"),
enable_backups_string);
let record_str = format!("doctl compute domain records create {} --record-type=A --record-data=%ip_address% --record-name={}", domain.unwrap_or("one.haus"), subdomain);
let _ = chain::CommandChain::new()
.cmd("doctl compute ssh-key list --no-header --format=ID")
.result_mapped_cmd(&ssh_key_mapping_func, &create_str)
.cmd("doctl compute droplet list --format Name,PublicIPv4,PublicIPv6,Status")
.result_mapped_cmd(&ip_address_mapping_func, &record_str)
.execute();
}
pub fn destroy_droplet_by_name(name: &str, domain: Option<&str>) {
let subdomain = get_subdomain_from_name(name);
let record_id_extractor = |res: &command::Result, cmd_str: String| -> String {
let mut record_id : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(subdomain) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
record_id = Some(fields[1].to_string());
break;
}
}
if record_id == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%record_id%", &record_id.unwrap());
new_cmd.to_string()
};
let domain_name = domain.unwrap_or("one.haus");
let delete_droplet_cmd = format!("doctl compute droplet delete -f {}", name);
let list_records_cmd = format!("doctl compute domain records list {} --format Name,ID --no-header", domain_name);
let delete_record_cmd = format!("doctl compute domain records delete -f {} %record_id%", domain_name);
// TODO: check the result heh
let _ = chain::CommandChain::new()
.cmd_nonfatal(&delete_droplet_cmd)
.cmd(&list_records_cmd)
.result_mapped_cmd(&record_id_extractor, &delete_record_cmd)
.execute();
}
pub fn create_sshkey(name: &str) {
// By default, always attempt to add a new key with [name] mapping to ~/.ssh/id_rsa.pub
let create_key_str = format!("doctl compute ssh-key create {} --public-key=\"$(cat ~/.ssh/id_rsa.pub)\"", name);
println!("Running command:\n\t\t{}", create_key_str);
// Create the actual droplet
let result = command::run_host_cmd(&create_key_str);
if !result.success {
println!("Failed with stderr:\n\n{}", result.stderr);
}
}
|
{
&"@"
}
|
conditional_block
|
digitalocean.rs
|
use super::command;
use super::chain;
fn get_subdomain_from_name(name: &str) -> &str {
// For subdomains, we only take the first element when split by ".", this allows
// us to create naked domain names or use the same subdomain across domains.
let components: Vec<&str> = name.split(".").collect();
if components.len() > 2 {
// If first throws an error after we've checked length, panic
components.first().unwrap()
} else {
&"@"
}
}
pub fn create_droplet_by_name(name: &str, region: Option<&str>, size: Option<&str>, domain: Option<&str>,
enable_backups: Option<&str>) {
let ssh_key_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let ids : Vec<&str> = res.stdout.lines().collect();
let new_cmd = str::replace(&cmd_str, "%ssh_keys%", &ids.join(","));
new_cmd.to_string()
};
let ip_address_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let mut ip_address : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(name) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
ip_address = Some(fields[1].to_string());
break;
}
}
if ip_address == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%ip_address%", &ip_address.unwrap());
new_cmd.to_string()
};
let subdomain = get_subdomain_from_name(name);
let enable_backups_string: &str;
match enable_backups {
Some("y") => enable_backups_string = "--enable-backups",
Some("Y") => enable_backups_string = "--enable-backups",
Some(_) => enable_backups_string = "",
_ => enable_backups_string = ""
}
let create_str = format!("doctl compute droplet create {} --image=ubuntu-16-04-x64 --region={} --size={} --ssh-keys=\"%ssh_keys%\" {} --wait",
name,
region.unwrap_or("sfo1"),
size.unwrap_or("512mb"),
enable_backups_string);
let record_str = format!("doctl compute domain records create {} --record-type=A --record-data=%ip_address% --record-name={}", domain.unwrap_or("one.haus"), subdomain);
let _ = chain::CommandChain::new()
.cmd("doctl compute ssh-key list --no-header --format=ID")
.result_mapped_cmd(&ssh_key_mapping_func, &create_str)
.cmd("doctl compute droplet list --format Name,PublicIPv4,PublicIPv6,Status")
.result_mapped_cmd(&ip_address_mapping_func, &record_str)
.execute();
}
pub fn destroy_droplet_by_name(name: &str, domain: Option<&str>) {
let subdomain = get_subdomain_from_name(name);
let record_id_extractor = |res: &command::Result, cmd_str: String| -> String {
let mut record_id : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(subdomain) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
record_id = Some(fields[1].to_string());
break;
}
}
if record_id == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%record_id%", &record_id.unwrap());
new_cmd.to_string()
};
let domain_name = domain.unwrap_or("one.haus");
|
let list_records_cmd = format!("doctl compute domain records list {} --format Name,ID --no-header", domain_name);
let delete_record_cmd = format!("doctl compute domain records delete -f {} %record_id%", domain_name);
// TODO: check the result heh
let _ = chain::CommandChain::new()
.cmd_nonfatal(&delete_droplet_cmd)
.cmd(&list_records_cmd)
.result_mapped_cmd(&record_id_extractor, &delete_record_cmd)
.execute();
}
pub fn create_sshkey(name: &str) {
// By default, always attempt to add a new key with [name] mapping to ~/.ssh/id_rsa.pub
let create_key_str = format!("doctl compute ssh-key create {} --public-key=\"$(cat ~/.ssh/id_rsa.pub)\"", name);
println!("Running command:\n\t\t{}", create_key_str);
// Create the actual droplet
let result = command::run_host_cmd(&create_key_str);
if !result.success {
println!("Failed with stderr:\n\n{}", result.stderr);
}
}
|
let delete_droplet_cmd = format!("doctl compute droplet delete -f {}", name);
|
random_line_split
|
digitalocean.rs
|
use super::command;
use super::chain;
fn get_subdomain_from_name(name: &str) -> &str
|
pub fn create_droplet_by_name(name: &str, region: Option<&str>, size: Option<&str>, domain: Option<&str>,
enable_backups: Option<&str>) {
let ssh_key_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let ids : Vec<&str> = res.stdout.lines().collect();
let new_cmd = str::replace(&cmd_str, "%ssh_keys%", &ids.join(","));
new_cmd.to_string()
};
let ip_address_mapping_func = |res: &command::Result, cmd_str: String| -> String {
let mut ip_address : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(name) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
ip_address = Some(fields[1].to_string());
break;
}
}
if ip_address == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%ip_address%", &ip_address.unwrap());
new_cmd.to_string()
};
let subdomain = get_subdomain_from_name(name);
let enable_backups_string: &str;
match enable_backups {
Some("y") => enable_backups_string = "--enable-backups",
Some("Y") => enable_backups_string = "--enable-backups",
Some(_) => enable_backups_string = "",
_ => enable_backups_string = ""
}
let create_str = format!("doctl compute droplet create {} --image=ubuntu-16-04-x64 --region={} --size={} --ssh-keys=\"%ssh_keys%\" {} --wait",
name,
region.unwrap_or("sfo1"),
size.unwrap_or("512mb"),
enable_backups_string);
let record_str = format!("doctl compute domain records create {} --record-type=A --record-data=%ip_address% --record-name={}", domain.unwrap_or("one.haus"), subdomain);
let _ = chain::CommandChain::new()
.cmd("doctl compute ssh-key list --no-header --format=ID")
.result_mapped_cmd(&ssh_key_mapping_func, &create_str)
.cmd("doctl compute droplet list --format Name,PublicIPv4,PublicIPv6,Status")
.result_mapped_cmd(&ip_address_mapping_func, &record_str)
.execute();
}
pub fn destroy_droplet_by_name(name: &str, domain: Option<&str>) {
let subdomain = get_subdomain_from_name(name);
let record_id_extractor = |res: &command::Result, cmd_str: String| -> String {
let mut record_id : Option<String> = None;
let res_stdout = res.stdout.clone();
for line in res_stdout.lines() {
if line.starts_with(subdomain) {
debug!("Found: {}", line);
let fields : Vec<&str> = line.split_whitespace().collect();
if fields.len() < 2 {
warn!("Couldn't find ip address in line: {}", line);
}
record_id = Some(fields[1].to_string());
break;
}
}
if record_id == None {
error!("Couldn't locate droplet in output: {}", res_stdout);
return "--will fail--".to_string()
}
let new_cmd = str::replace(&cmd_str, "%record_id%", &record_id.unwrap());
new_cmd.to_string()
};
let domain_name = domain.unwrap_or("one.haus");
let delete_droplet_cmd = format!("doctl compute droplet delete -f {}", name);
let list_records_cmd = format!("doctl compute domain records list {} --format Name,ID --no-header", domain_name);
let delete_record_cmd = format!("doctl compute domain records delete -f {} %record_id%", domain_name);
// TODO: check the result heh
let _ = chain::CommandChain::new()
.cmd_nonfatal(&delete_droplet_cmd)
.cmd(&list_records_cmd)
.result_mapped_cmd(&record_id_extractor, &delete_record_cmd)
.execute();
}
pub fn create_sshkey(name: &str) {
// By default, always attempt to add a new key with [name] mapping to ~/.ssh/id_rsa.pub
let create_key_str = format!("doctl compute ssh-key create {} --public-key=\"$(cat ~/.ssh/id_rsa.pub)\"", name);
println!("Running command:\n\t\t{}", create_key_str);
// Create the actual droplet
let result = command::run_host_cmd(&create_key_str);
if !result.success {
println!("Failed with stderr:\n\n{}", result.stderr);
}
}
|
{
// For subdomains, we only take the first element when split by ".", this allows
// us to create naked domain names or use the same subdomain across domains.
let components: Vec<&str> = name.split(".").collect();
if components.len() > 2 {
// If first throws an error after we've checked length, panic
components.first().unwrap()
} else {
&"@"
}
}
|
identifier_body
|