file_name (large_string, lengths 4–69)
| prefix (large_string, lengths 0–26.7k)
| suffix (large_string, lengths 0–24.8k)
| middle (large_string, lengths 0–2.12k)
| fim_type (large_string, 4 classes)
---|---|---|---|---|
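Each row below stores the text before the hole (prefix), the text after it (suffix), the held-out span (middle), and a label for what kind of span was masked (fim_type); concatenating prefix + middle + suffix reproduces the original file. As a quick orientation, here is a minimal sketch, not part of the dataset itself, of how one row's columns reassemble; the string literals are abbreviated from the first instr_aam.rs row and the variable names are illustrative only:

```rust
// Minimal sketch: rebuild the original file text from one row's columns.
// `prefix`, `middle`, and `suffix` are abbreviated copies of the first
// `instr_aam.rs` row; in the dataset they hold the full surrounding text.
fn main() {
    let prefix = "#[test]\nfn ";          // text before the masked span
    let middle = "aam_2";                 // the held-out span (fim_type: identifier_name)
    let suffix = "() {\n    // ...\n}\n"; // text after the masked span
    let reconstructed = format!("{}{}{}", prefix, middle, suffix);
    assert!(reconstructed.starts_with("#[test]\nfn aam_2()"));
    println!("{}", reconstructed);
}
```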
instr_aam.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn aam_1() {
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(116)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 116], OperandSize::Word)
}
#[test]
fn
|
() {
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(85)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 85], OperandSize::Dword)
}
|
aam_2
|
identifier_name
|
instr_aam.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn aam_1() {
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(116)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 116], OperandSize::Word)
}
#[test]
fn aam_2()
|
{
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(85)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 85], OperandSize::Dword)
}
|
identifier_body
|
|
instr_aam.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
|
#[test]
fn aam_2() {
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(85)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 85], OperandSize::Dword)
}
|
#[test]
fn aam_1() {
run_test(&Instruction { mnemonic: Mnemonic::AAM, operand1: Some(Literal8(116)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[212, 116], OperandSize::Word)
}
|
random_line_split
|
day_1.rs
|
pub use tdd_kata::graph_search_kata::day_1::{UnidirectedGraph, DepthFirstSearch};
pub use expectest::prelude::{be_equal_to, be_true, be_false};
describe! dfs {
describe! graph {
before_each {
let mut graph = UnidirectedGraph::default();
graph.add_edge(1, 2);
}
it "should add edge to graph" {
expect!(graph.edges()).to(be_equal_to(1));
}
it "should add number of edges to graph" {
graph.add_edge(2, 3);
graph.add_edge(3, 4);
expect!(graph.edges()).to(be_equal_to(3));
}
it "should contain all added vertices" {
graph.add_edge(2, 3);
graph.add_edge(3, 4);
expect!(graph.vertices()).to(be_equal_to(4));
}
|
let adj_to_two = graph.adjacent_to(2);
expect!(adj_to_two).to(be_equal_to(&vec![1]));
}
}
describe! search {
it "should create depth first search" {
let graph = UnidirectedGraph::default();
DepthFirstSearch::new(&graph, 1);
}
it "should have path between adjacent vertices" {
let mut graph = UnidirectedGraph::default();
graph.add_edge(1, 2);
let mut search = DepthFirstSearch::new(&graph, 1);
expect!(search.has_path(2)).to(be_true());
}
it "should not have path between not adjacent vertices" {
let mut graph = UnidirectedGraph::default();
graph.add_edge(1, 2);
graph.add_edge(3, 4);
let mut search = DepthFirstSearch::new(&graph, 1);
expect!(search.has_path(4)).to(be_false());
}
}
}
|
it "should be adjacent to each other" {
let adj_to_one = graph.adjacent_to(1);
expect!(adj_to_one).to(be_equal_to(&vec![2]));
|
random_line_split
|
log.rs
|
use crate::core::counter::Counter;
use crate::iterators::{HistogramIterator, PickMetadata, PickyIterator};
use crate::Histogram;
/// An iterator that will yield at log-size steps through the histogram's value range.
pub struct Iter<'a, T: 'a + Counter> {
hist: &'a Histogram<T>,
// > 1.0
next_value_reporting_level: f64,
// > 1.0
log_base: f64,
current_step_lowest_value_reporting_level: u64,
current_step_highest_value_reporting_level: u64,
}
impl<'a, T: 'a + Counter> Iter<'a, T> {
/// Construct a new logarithmic iterator. See `Histogram::iter_log` for details.
pub fn new(
hist: &'a Histogram<T>,
value_units_in_first_bucket: u64,
log_base: f64,
) -> HistogramIterator<'a, T, Iter<'a, T>> {
assert!(
value_units_in_first_bucket > 0,
"value_units_per_bucket must be > 0"
);
assert!(log_base > 1.0, "log_base must be > 1.0");
let new_lowest = hist.lowest_equivalent(value_units_in_first_bucket - 1);
HistogramIterator::new(
hist,
Iter {
hist,
log_base,
next_value_reporting_level: value_units_in_first_bucket as f64,
current_step_highest_value_reporting_level: value_units_in_first_bucket - 1,
current_step_lowest_value_reporting_level: new_lowest,
},
)
}
}
impl<'a, T: 'a + Counter> PickyIterator<T> for Iter<'a, T> {
fn pick(&mut self, index: usize, _: u64, _: T) -> Option<PickMetadata> {
let val = self.hist.value_for(index);
if val >= self.current_step_lowest_value_reporting_level || index == self.hist.last_index()
{
let metadata =
PickMetadata::new(None, Some(self.current_step_highest_value_reporting_level));
// implies log_base must be > 1.0
self.next_value_reporting_level *= self.log_base;
// won't underflow since next_value_reporting_level starts > 0 and only grows
self.current_step_highest_value_reporting_level =
self.next_value_reporting_level as u64 - 1;
self.current_step_lowest_value_reporting_level = self
.hist
.lowest_equivalent(self.current_step_highest_value_reporting_level);
Some(metadata)
} else {
None
}
}
fn more(&mut self, index_to_pick: usize) -> bool
|
}
|
{
// If the next iterate will not move to the next sub bucket index (which is empty if we
// reached this point), then we are not yet done iterating (we want to iterate until we are
// no longer on a value that has a count, rather than until we first reach the last value
// that has a count. The difference is subtle but important)...
self.hist
.lowest_equivalent(self.next_value_reporting_level as u64)
< self.hist.value_for(index_to_pick)
}
|
identifier_body
|
log.rs
|
use crate::core::counter::Counter;
use crate::iterators::{HistogramIterator, PickMetadata, PickyIterator};
use crate::Histogram;
/// An iterator that will yield at log-size steps through the histogram's value range.
pub struct Iter<'a, T: 'a + Counter> {
hist: &'a Histogram<T>,
// > 1.0
next_value_reporting_level: f64,
// > 1.0
log_base: f64,
current_step_lowest_value_reporting_level: u64,
current_step_highest_value_reporting_level: u64,
}
impl<'a, T: 'a + Counter> Iter<'a, T> {
/// Construct a new logarithmic iterator. See `Histogram::iter_log` for details.
pub fn new(
hist: &'a Histogram<T>,
value_units_in_first_bucket: u64,
log_base: f64,
) -> HistogramIterator<'a, T, Iter<'a, T>> {
assert!(
value_units_in_first_bucket > 0,
"value_units_per_bucket must be > 0"
);
assert!(log_base > 1.0, "log_base must be > 1.0");
let new_lowest = hist.lowest_equivalent(value_units_in_first_bucket - 1);
HistogramIterator::new(
hist,
Iter {
hist,
log_base,
next_value_reporting_level: value_units_in_first_bucket as f64,
current_step_highest_value_reporting_level: value_units_in_first_bucket - 1,
current_step_lowest_value_reporting_level: new_lowest,
},
)
}
}
impl<'a, T: 'a + Counter> PickyIterator<T> for Iter<'a, T> {
fn
|
(&mut self, index: usize, _: u64, _: T) -> Option<PickMetadata> {
let val = self.hist.value_for(index);
if val >= self.current_step_lowest_value_reporting_level || index == self.hist.last_index()
{
let metadata =
PickMetadata::new(None, Some(self.current_step_highest_value_reporting_level));
// implies log_base must be > 1.0
self.next_value_reporting_level *= self.log_base;
// won't underflow since next_value_reporting_level starts > 0 and only grows
self.current_step_highest_value_reporting_level =
self.next_value_reporting_level as u64 - 1;
self.current_step_lowest_value_reporting_level = self
.hist
.lowest_equivalent(self.current_step_highest_value_reporting_level);
Some(metadata)
} else {
None
}
}
fn more(&mut self, index_to_pick: usize) -> bool {
// If the next iterate will not move to the next sub bucket index (which is empty if we
// reached this point), then we are not yet done iterating (we want to iterate until we are
// no longer on a value that has a count, rather than until we first reach the last value
// that has a count. The difference is subtle but important)...
self.hist
.lowest_equivalent(self.next_value_reporting_level as u64)
< self.hist.value_for(index_to_pick)
}
}
|
pick
|
identifier_name
|
log.rs
|
use crate::core::counter::Counter;
use crate::iterators::{HistogramIterator, PickMetadata, PickyIterator};
use crate::Histogram;
/// An iterator that will yield at log-size steps through the histogram's value range.
pub struct Iter<'a, T: 'a + Counter> {
hist: &'a Histogram<T>,
// > 1.0
next_value_reporting_level: f64,
// > 1.0
log_base: f64,
current_step_lowest_value_reporting_level: u64,
current_step_highest_value_reporting_level: u64,
}
impl<'a, T: 'a + Counter> Iter<'a, T> {
/// Construct a new logarithmic iterator. See `Histogram::iter_log` for details.
pub fn new(
hist: &'a Histogram<T>,
|
"value_units_per_bucket must be > 0"
);
assert!(log_base > 1.0, "log_base must be > 1.0");
let new_lowest = hist.lowest_equivalent(value_units_in_first_bucket - 1);
HistogramIterator::new(
hist,
Iter {
hist,
log_base,
next_value_reporting_level: value_units_in_first_bucket as f64,
current_step_highest_value_reporting_level: value_units_in_first_bucket - 1,
current_step_lowest_value_reporting_level: new_lowest,
},
)
}
}
impl<'a, T: 'a + Counter> PickyIterator<T> for Iter<'a, T> {
fn pick(&mut self, index: usize, _: u64, _: T) -> Option<PickMetadata> {
let val = self.hist.value_for(index);
if val >= self.current_step_lowest_value_reporting_level || index == self.hist.last_index()
{
let metadata =
PickMetadata::new(None, Some(self.current_step_highest_value_reporting_level));
// implies log_base must be > 1.0
self.next_value_reporting_level *= self.log_base;
// won't underflow since next_value_reporting_level starts > 0 and only grows
self.current_step_highest_value_reporting_level =
self.next_value_reporting_level as u64 - 1;
self.current_step_lowest_value_reporting_level = self
.hist
.lowest_equivalent(self.current_step_highest_value_reporting_level);
Some(metadata)
} else {
None
}
}
fn more(&mut self, index_to_pick: usize) -> bool {
// If the next iterate will not move to the next sub bucket index (which is empty if we
// reached this point), then we are not yet done iterating (we want to iterate until we are
// no longer on a value that has a count, rather than until we first reach the last value
// that has a count. The difference is subtle but important)...
self.hist
.lowest_equivalent(self.next_value_reporting_level as u64)
< self.hist.value_for(index_to_pick)
}
}
|
value_units_in_first_bucket: u64,
log_base: f64,
) -> HistogramIterator<'a, T, Iter<'a, T>> {
assert!(
value_units_in_first_bucket > 0,
|
random_line_split
|
log.rs
|
use crate::core::counter::Counter;
use crate::iterators::{HistogramIterator, PickMetadata, PickyIterator};
use crate::Histogram;
/// An iterator that will yield at log-size steps through the histogram's value range.
pub struct Iter<'a, T: 'a + Counter> {
hist: &'a Histogram<T>,
// > 1.0
next_value_reporting_level: f64,
// > 1.0
log_base: f64,
current_step_lowest_value_reporting_level: u64,
current_step_highest_value_reporting_level: u64,
}
impl<'a, T: 'a + Counter> Iter<'a, T> {
/// Construct a new logarithmic iterator. See `Histogram::iter_log` for details.
pub fn new(
hist: &'a Histogram<T>,
value_units_in_first_bucket: u64,
log_base: f64,
) -> HistogramIterator<'a, T, Iter<'a, T>> {
assert!(
value_units_in_first_bucket > 0,
"value_units_per_bucket must be > 0"
);
assert!(log_base > 1.0, "log_base must be > 1.0");
let new_lowest = hist.lowest_equivalent(value_units_in_first_bucket - 1);
HistogramIterator::new(
hist,
Iter {
hist,
log_base,
next_value_reporting_level: value_units_in_first_bucket as f64,
current_step_highest_value_reporting_level: value_units_in_first_bucket - 1,
current_step_lowest_value_reporting_level: new_lowest,
},
)
}
}
impl<'a, T: 'a + Counter> PickyIterator<T> for Iter<'a, T> {
fn pick(&mut self, index: usize, _: u64, _: T) -> Option<PickMetadata> {
let val = self.hist.value_for(index);
if val >= self.current_step_lowest_value_reporting_level || index == self.hist.last_index()
{
let metadata =
PickMetadata::new(None, Some(self.current_step_highest_value_reporting_level));
// implies log_base must be > 1.0
self.next_value_reporting_level *= self.log_base;
// won't underflow since next_value_reporting_level starts > 0 and only grows
self.current_step_highest_value_reporting_level =
self.next_value_reporting_level as u64 - 1;
self.current_step_lowest_value_reporting_level = self
.hist
.lowest_equivalent(self.current_step_highest_value_reporting_level);
Some(metadata)
} else
|
}
fn more(&mut self, index_to_pick: usize) -> bool {
// If the next iterate will not move to the next sub bucket index (which is empty if we
// reached this point), then we are not yet done iterating (we want to iterate until we are
// no longer on a value that has a count, rather than until we first reach the last value
// that has a count. The difference is subtle but important)...
self.hist
.lowest_equivalent(self.next_value_reporting_level as u64)
< self.hist.value_for(index_to_pick)
}
}
|
{
None
}
|
conditional_block
|
handlers.rs
|
// Handle incoming requests.
extern crate iron;
extern crate redis;
extern crate rustc_serialize;
extern crate hyper;
extern crate url;
extern crate router;
use std::vec::Vec;
use rustc_serialize::json::Json;
use iron::modifiers::Redirect;
use iron::headers::{CacheControl, CacheDirective};
use iron::prelude::*;
use iron::status;
use iron::Url as iUrl;
use hyper::client::Client;
use router::Router;
use redis::{Commands, Value};
use helpers::{setup_redis, fetch, get_status_or, local_redir, set_redis_cache};
use github::schedule_update as schedule_github_update;
// The base URL for our badges. We aren't actually compiling them ourselves,
// but are reusing the great shields.io service.
static BADGE_URL_BASE: &'static str = "https://img.shields.io/badge/";
// Github Finder
// Expand a branch name into the hash, cache the redirect for 5min
// `/github/:user/:repo/badge.svg => /github/:user/:repo/:sha/badge.svg`
pub fn
|
(req: &mut Request) -> IronResult<Response> {
// Learn the parameters given to the request
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let hyper_client: Client = Client::new();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let branch = router.find("branch").unwrap_or("master");
let method = router.find("method").unwrap_or("badge.svg");
// And the cache key we use to keep the map from branch->SHA
let redis_key = format!("cached-sha/github/{0}/{1}:{2}", user, repo, branch);
// Let's see if redis has this key. If it does, redirect the request
// directly
match redis.get(redis_key.to_owned()) {
Ok(Value::Data(sha)) => {
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
String::from_utf8(sha).unwrap(),
method),
&req.url)
}
// otherwise, we need to look up the current SHA for the branch
_ => {
let github_url = format!("https://api.github.com/repos/{0}/{1}/git/refs/heads/{2}",
user,
repo,
branch);
// Fetch the content API request for the Github URL,
// Parse its JSON and try to find the `SHA`-key.
if let Some(body) = fetch(&hyper_client, &github_url) {
if let Ok(json) = Json::from_str(&body) {
if let Some(&Json::String(ref sha)) = json.find_path(&["object", "sha"]) {
// Once found, store the SHA in the cache and redirect
// the request to
set_redis_cache(&redis, &redis_key, &sha);
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
sha,
method),
&req.url)
} else {
// If we couldn't find the SHA, then there is a problem
// we need to inform the user about. Usually this means
// they did a typo or the content moved – either way, we
// fire a 404 – Not Found.
warn!("{}: SHA not found in JSON: {}", &github_url, &json);
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
} else {
warn!("{}: Couldn't parse Githubs JSON response: {}",
&github_url,
&body);
Ok(Response::with((status::InternalServerError,
"Couldn't parse Githubs JSON response")))
}
} else {
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
}
}
}
// ## Github Handler
// Handle the request for a status report of a user-repo-sha combination.
// Usually the request ends up here after having been redirected via the
// `github_finder`-handler.
// This request is where the actual sausage gets made.
pub fn github_handler(req: &mut Request) -> IronResult<Response> {
// First extract all the request information
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let sha = router.find("sha").unwrap();
let filename: Vec<&str> = router.find("method")
.unwrap_or("badge.svg")
.rsplitn(2, '.')
.collect();
let (method, ext) = match filename.len() {
2 => (filename[1], filename[0]),
_ => (filename[0], ""),
};
// Use `get_status_or` to look up and map the cached result
// or trigger a `schedule_github_update` if that isn't found yet
let result_key = format!("result/github/{0}/{1}:{2}", user, repo, sha);
let (text, color): (String, String) = get_status_or(
redis.get(result_key.to_owned()),
|| schedule_github_update(&user, &repo, &sha));
// Then render the response
let mut response = match method {
// If this is a simple request for status, just return the result
"status" => Response::with((status::Ok, text.to_owned())),
// for the badge, put text, color, base URL and query-parameters from the
// incoming requests together to the URL we need to forward it to
"badge" => {
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, text, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, text, color, ext),
};
// while linting, use only temporary redirects, so that the actual
// result will be asked for later
Response::with((match text.as_str() {
"linting" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// emojibadge and fullemojibadge do the same as the request for `badge`,
// except that they replace the status with appropriate emoji
"emojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
"fullemojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}📎-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}📎-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// If the request is asking for the logs, fetch those. This isn't particularly
// simple as the Redis library makes the unwrapping a little bit tricky and hard
// for Rust to guess the proper types. So we have to specify the types and iterators
// rather explicitly at times.
"log" => {
let log_key = format!("log/github/{0}/{1}:{2}", user, repo, sha);
match redis.lrange(log_key.to_owned(), 0, -1) {
Ok(Some(Value::Bulk(logs))) => {
let logs: Vec<String> = logs.iter()
.map(|ref v| {
match **v {
Value::Data(ref val) => {
String::from_utf8(val.to_owned())
.unwrap()
.to_owned()
}
_ => "".to_owned(),
}
})
.collect();
Response::with((status::Ok, logs.join("\n")))
}
// if there aren't any logs found, we might have just started the
// process. Let the request know.
_ => {
Response::with((status::Ok, "Started. Please refresh"))
}
}
},
// Nothing else is supported – but in Rust, we have to return all things
// of the same type. So let's return a `BadRequest` :).
_ => Response::with((status::BadRequest, format!("{} Not Implemented.", method))),
};
response.headers.set(CacheControl(vec![CacheDirective::NoCache]));
Ok(response)
}
|
github_finder
|
identifier_name
|
handlers.rs
|
// Handle incoming requests.
extern crate iron;
extern crate redis;
extern crate rustc_serialize;
extern crate hyper;
extern crate url;
extern crate router;
use std::vec::Vec;
use rustc_serialize::json::Json;
use iron::modifiers::Redirect;
use iron::headers::{CacheControl, CacheDirective};
use iron::prelude::*;
use iron::status;
use iron::Url as iUrl;
use hyper::client::Client;
use router::Router;
use redis::{Commands, Value};
use helpers::{setup_redis, fetch, get_status_or, local_redir, set_redis_cache};
use github::schedule_update as schedule_github_update;
// The base URL for our badges. We aren't actually compiling them ourselves,
// but are reusing the great shields.io service.
static BADGE_URL_BASE: &'static str = "https://img.shields.io/badge/";
// Github Finder
// Expand a branch name into the hash, cache the redirect for 5min
// `/github/:user/:repo/badge.svg => /github/:user/:repo/:sha/badge.svg`
pub fn github_finder(req: &mut Request) -> IronResult<Response> {
// Learn the parameters given to the request
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let hyper_client: Client = Client::new();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let branch = router.find("branch").unwrap_or("master");
let method = router.find("method").unwrap_or("badge.svg");
// And the cache key we use to keep the map from branch->SHA
let redis_key = format!("cached-sha/github/{0}/{1}:{2}", user, repo, branch);
// Let's see if redis has this key. If it does, redirect the request
// directly
match redis.get(redis_key.to_owned()) {
Ok(Value::Data(sha)) => {
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
String::from_utf8(sha).unwrap(),
method),
&req.url)
}
// otherwise, we need to look up the current SHA for the branch
|
branch);
// Fetch the content API request for the Github URL,
// Parse its JSON and try to find the `SHA`-key.
if let Some(body) = fetch(&hyper_client, &github_url) {
if let Ok(json) = Json::from_str(&body) {
if let Some(&Json::String(ref sha)) = json.find_path(&["object", "sha"]) {
// Once found, store the SHA in the cache and redirect
// the request to
set_redis_cache(&redis, &redis_key, &sha);
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
sha,
method),
&req.url)
} else {
// If we couldn't find the SHA, then there is a problem
// we need to inform the user about. Usually this means
// they did a typo or the content moved – either way, we
// fire a 404 – Not Found.
warn!("{}: SHA not found in JSON: {}", &github_url, &json);
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
} else {
warn!("{}: Couldn't parse Githubs JSON response: {}",
&github_url,
&body);
Ok(Response::with((status::InternalServerError,
"Couldn't parse Githubs JSON response")))
}
} else {
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
}
}
}
// ## Github Handler
// Handle the request for a status report of a user-repo-sha combination.
// Usually the request ends up here after having been redirected via the
// `github_finder`-handler.
// This request is where the actual sausage gets made.
pub fn github_handler(req: &mut Request) -> IronResult<Response> {
// First extract all the request information
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let sha = router.find("sha").unwrap();
let filename: Vec<&str> = router.find("method")
.unwrap_or("badge.svg")
.rsplitn(2, '.')
.collect();
let (method, ext) = match filename.len() {
2 => (filename[1], filename[0]),
_ => (filename[0], ""),
};
// Use `get_status_or` to look up and map the cached result
// or trigger a `schedule_github_update` if that isn't found yet
let result_key = format!("result/github/{0}/{1}:{2}", user, repo, sha);
let (text, color): (String, String) = get_status_or(
redis.get(result_key.to_owned()),
|| schedule_github_update(&user, &repo, &sha));
// Then render the response
let mut response = match method {
// If this is a simple request for status, just return the result
"status" => Response::with((status::Ok, text.to_owned())),
// for the badge, put text, color, base URL and query-parameters from the
// incoming requests together to the URL we need to forward it to
"badge" => {
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, text, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, text, color, ext),
};
// while linting, use only temporary redirects, so that the actual
// result will be asked for later
Response::with((match text.as_str() {
"linting" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// emojibadge and fullemojibadge do the same as the request for `badge`,
// except that they replace the status with appropriate emoji
"emojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
"fullemojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}📎-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}📎-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// If the request is asking for the logs, fetch those. This isn't particularly
// simple as the Redis library makes the unwrapping a little bit tricky and hard
// for Rust to guess the proper types. So we have to specify the types and iterators
// rather explicitly at times.
"log" => {
let log_key = format!("log/github/{0}/{1}:{2}", user, repo, sha);
match redis.lrange(log_key.to_owned(), 0, -1) {
Ok(Some(Value::Bulk(logs))) => {
let logs: Vec<String> = logs.iter()
.map(|ref v| {
match **v {
Value::Data(ref val) => {
String::from_utf8(val.to_owned())
.unwrap()
.to_owned()
}
_ => "".to_owned(),
}
})
.collect();
Response::with((status::Ok, logs.join("\n")))
}
// if there aren't any logs found, we might have just started the
// process. Let the request know.
_ => {
Response::with((status::Ok, "Started. Please refresh"))
}
}
},
// Nothing else is supported – but in Rust, we have to return all things
// of the same type. So let's return a `BadRequest` :).
_ => Response::with((status::BadRequest, format!("{} Not Implemented.", method))),
};
response.headers.set(CacheControl(vec![CacheDirective::NoCache]));
Ok(response)
}
|
_ => {
let github_url = format!("https://api.github.com/repos/{0}/{1}/git/refs/heads/{2}",
user,
repo,
|
random_line_split
|
handlers.rs
|
// Handle incoming requests.
extern crate iron;
extern crate redis;
extern crate rustc_serialize;
extern crate hyper;
extern crate url;
extern crate router;
use std::vec::Vec;
use rustc_serialize::json::Json;
use iron::modifiers::Redirect;
use iron::headers::{CacheControl, CacheDirective};
use iron::prelude::*;
use iron::status;
use iron::Url as iUrl;
use hyper::client::Client;
use router::Router;
use redis::{Commands, Value};
use helpers::{setup_redis, fetch, get_status_or, local_redir, set_redis_cache};
use github::schedule_update as schedule_github_update;
// The base URL for our badges. We aren't actually compiling them ourselves,
// but are reusing the great shields.io service.
static BADGE_URL_BASE: &'static str = "https://img.shields.io/badge/";
// Github Finder
// Expand a branch name into the hash, cache the redirect for 5min
// `/github/:user/:repo/badge.svg => /github/:user/:repo/:sha/badge.svg`
pub fn github_finder(req: &mut Request) -> IronResult<Response> {
// Learn the parameters given to the request
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let hyper_client: Client = Client::new();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let branch = router.find("branch").unwrap_or("master");
let method = router.find("method").unwrap_or("badge.svg");
// And the cache key we use to keep the map from branch->SHA
let redis_key = format!("cached-sha/github/{0}/{1}:{2}", user, repo, branch);
// Let's see if redis has this key. If it does, redirect the request
// directly
match redis.get(redis_key.to_owned()) {
Ok(Value::Data(sha)) => {
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
String::from_utf8(sha).unwrap(),
method),
&req.url)
}
// otherwise, we need to look up the current SHA for the branch
_ => {
let github_url = format!("https://api.github.com/repos/{0}/{1}/git/refs/heads/{2}",
user,
repo,
branch);
// Fetch the content API request for the Github URL,
// Parse its JSON and try to find the `SHA`-key.
if let Some(body) = fetch(&hyper_client, &github_url) {
if let Ok(json) = Json::from_str(&body) {
if let Some(&Json::String(ref sha)) = json.find_path(&["object", "sha"]) {
// Once found, store the SHA in the cache and redirect
// the request to
set_redis_cache(&redis, &redis_key, &sha);
local_redir(&format!("/github/sha/{0}/{1}/{2}/{3}",
user,
repo,
sha,
method),
&req.url)
} else {
// If we couldn't find the SHA, then there is a problem
// we need to inform the user about. Usually this means
// they did a typo or the content moved – either way, we
// fire a 404 – Not Found.
warn!("{}: SHA not found in JSON: {}", &github_url, &json);
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
} else {
warn!("{}: Couldn't parse Githubs JSON response: {}",
&github_url,
&body);
Ok(Response::with((status::InternalServerError,
"Couldn't parse Githubs JSON response")))
}
} else {
Ok(Response::with((status::NotFound,
format!("Couldn't find on Github {}", &github_url))))
}
}
}
}
// ## Github Handler
// Handle the request for a status report of a user-repo-sha combination.
// Usually the request ends up here after having been redirected via the
// `github_finder`-handler.
// This request is where the actual sausage gets made.
pub fn github_handler(req: &mut Request) -> IronResult<Response> {
|
redis.get(result_key.to_owned()),
|| schedule_github_update(&user, &repo, &sha));
// Then render the response
let mut response = match method {
// If this is a simple request for status, just return the result
"status" => Response::with((status::Ok, text.to_owned())),
// for the badge, put text, color, base URL and query-parameters from the
// incoming requests together to the URL we need to forward it to
"badge" => {
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, text, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, text, color, ext),
};
// while linting, use only temporary redirects, so that the actual
// result will be asked for later
Response::with((match text.as_str() {
"linting" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// emojibadge and fullemojibadge do the same as the request for `badge`,
// except that they replace the status with appropriate emoji
"emojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}clippy-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}clippy-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
"fullemojibadge" => {
let emoji = match text.as_str() {
"linting" => "👷".to_string(),
"failed" => "😱".to_string(),
"success" => "👌".to_string(),
_ => text.replace("errors", "🤕").replace("warnings", "😟")
};
let target_badge = match req.url.clone().query {
Some(query) => format!("{}📎-{}-{}.{}?{}", BADGE_URL_BASE, emoji, color, ext, query),
_ => format!("{}📎-{}-{}.{}", BADGE_URL_BASE, emoji, color, ext),
};
Response::with((match color.as_str() {
"blue" => status::Found,
_ => status::MovedPermanently
}, Redirect(iUrl::parse(&target_badge).unwrap())))
},
// If the request is asking for the logs, fetch those. This isn't particularly
// simple as the Redis library makes the unwrapping a little bit tricky and hard
// for rust to guess the proper types. So we have to specify the types and iterator
// rather explictly at times.
"log" => {
let log_key = format!("log/github/{0}/{1}:{2}", user, repo, sha);
match redis.lrange(log_key.to_owned(), 0, -1) {
Ok(Some(Value::Bulk(logs))) => {
let logs: Vec<String> = logs.iter()
.map(|ref v| {
match **v {
Value::Data(ref val) => {
String::from_utf8(val.to_owned())
.unwrap()
.to_owned()
}
_ => "".to_owned(),
}
})
.collect();
Response::with((status::Ok, logs.join("\n")))
}
// if there aren't any logs found, we might have just started the
// process. Let the request know.
_ => {
Response::with((status::Ok, "Started. Please refresh"))
}
}
},
// Nothing else is supported – but in Rust, we have to return all things
// of the same type. So let's return a `BadRequest` :).
_ => Response::with((status::BadRequest, format!("{} Not Implemented.", method))),
};
response.headers.set(CacheControl(vec![CacheDirective::NoCache]));
Ok(response)
}
|
// First extract all the request information
let router = req.extensions.get::<Router>().unwrap();
let redis: redis::Connection = setup_redis();
let user = router.find("user").unwrap();
let repo = router.find("repo").unwrap();
let sha = router.find("sha").unwrap();
let filename: Vec<&str> = router.find("method")
.unwrap_or("badge.svg")
.rsplitn(2, '.')
.collect();
let (method, ext) = match filename.len() {
2 => (filename[1], filename[0]),
_ => (filename[0], ""),
};
// Use `get_status_or` to look up and map the cached result
// or trigger a `schedule_github_update` if that isn't found yet
let result_key = format!("result/github/{0}/{1}:{2}", user, repo, sha);
let (text, color): (String, String) = get_status_or(
|
identifier_body
|
local.rs
|
//! The thread-local state.
use std::{mem, thread};
use std::cell::RefCell;
use {global, hazard, guard, debug, settings};
use garbage::Garbage;
thread_local! {
/// The state of this thread.
static STATE: RefCell<State> = RefCell::new(State::default());
}
/// Add new garbage to be deleted.
///
/// This garbage is pushed to a thread-local queue. When enough garbage is accumulated in the
/// thread, it is exported to the global state.
pub fn add_garbage(garbage: Garbage) {
// Print message in debug mode.
debug::exec(|| println!("Adding garbage: {:?}", garbage));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for queueing garbage.
global::export_garbage(vec![garbage]);
} else {
// Add the garbage.
if STATE.with(|s| s.borrow_mut().add_garbage(garbage)) {
// The local state exported garbage to the global state, hence we must tick in order to
// ensure that the garbage is periodically collected.
global::tick();
}
}
}
/// Get a blocked hazard.
///
/// If possible, this will simply pop one from the thread-local cache of hazards. Otherwise, one must
/// be registered in the global state.
///
/// # Fence
///
/// This does not fence, and you must thus be careful with updating the value afterwards, as
/// reordering can happen, meaning that the hazard has not been blocked yet.
pub fn get_hazard() -> hazard::Writer {
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for creating new
// hazards.
global::create_hazard()
} else {
STATE.with(|s| s.borrow_mut().get_hazard())
}
}
/// Free a hazard.
///
/// This frees a hazard to the thread-local cache of hazards.
///
/// It is important that the hazard is **not** in blocked state, as such a thing can cause infinite
/// looping.
///
/// # Panics
///
/// This might panic in debug mode if the given hazard is in blocked state, as such a thing can cause
/// an infinite garbage collection cycle, or if the hazard is in dead state, as that means that it may
/// not be reusable (it could be destroyed).
pub fn free_hazard(hazard: hazard::Writer) {
// Print message in debug mode.
debug::exec(|| println!("Freeing hazard: {:?}", hazard));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
debug_assert!(!hazard.is_blocked(), "Illegally freeing a blocked hazards.");
if STATE.state() == thread::LocalKeyState::Destroyed {
// Since the state was deinitialized, we cannot store it for later reuse, so we are forced
// to simply kill the hazard.
hazard.kill();
} else {
STATE.with(|s| s.borrow_mut().free_hazard(hazard));
}
}
/// Export the garbage of this thread to the global state.
///
/// This is useful for propagating accumulated garbage such that it can be destroyed by the next
/// garbage collection.
pub fn export_garbage() {
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
// We can only export when the TLS variable isn't destroyed. Otherwise, there would be nothing
// to export!
if STATE.state() != thread::LocalKeyState::Destroyed {
STATE.with(|s| s.borrow_mut().export_garbage());
// We tick after the state is no longer reserved, as the tick could potentially call
// destructors that access the TLS variable.
global::tick();
}
}
/// A thread-local state.
#[derive(Default)]
struct
|
{
/// The cached garbage waiting to be exported to the global state.
garbage: Vec<Garbage>,
/// The cache of currently available hazards.
///
/// We maintain this cache to avoid the performance hit of creating new hazards.
///
/// The hazards in this vector are not necessarily in state "free". Only when a sufficient
/// amount of available hazards has accumulated, they will be set to free. This means that we
/// don't have to reset the state of a hazard after usage, giving a quite significant speed-up.
available_hazards: Vec<hazard::Writer>,
/// The hazards in the cache before this index are free.
///
/// This number keeps track of which hazards in `self.available_hazards` are set to state "free".
/// Before this index, every hazard must be set to "free".
///
/// It is useful for knowing when to free the hazards to allow garbage collection.
available_hazards_free_before: usize,
}
impl State {
/// Get the number of hazards in the cache which are not in state "free".
fn non_free_hazards(&self) -> usize {
self.available_hazards.len() - self.available_hazards_free_before
}
/// See `get_hazard()`.
fn get_hazard(&mut self) -> hazard::Writer {
// Check if there are hazards in the cache.
if let Some(hazard) = self.available_hazards.pop() {
// There is; we don't need to create a new hazard.
// Since the hazard popped from the cache is not blocked, we must block the hazard to
// satisfy the requirements of this function.
hazard.block();
hazard
} else {
// There is not; we must create a new hazard.
global::create_hazard()
}
}
/// See `free_hazard()`.
fn free_hazard(&mut self, hazard: hazard::Writer) {
// FIXME: This can lead to some subtle bugs, since the dtor is unpredictable as there is no
// way of predicting when the hazard is cleared.
// Push the given hazard to the cache.
self.available_hazards.push(hazard);
// Check if we exceeded the limit.
if self.non_free_hazards() > settings::get().max_non_free_hazards {
// We did; we must now set the non-free hazards to "free".
for i in &self.available_hazards[self.available_hazards_free_before..] {
i.free();
}
// Update the counter such that we mark the new hazards set to "free".
self.available_hazards_free_before = self.available_hazards.len();
}
}
/// Queues garbage to destroy.
///
/// Eventually the added garbage will be exported to the global state through
/// `global::add_garbage()`.
///
/// See `add_garbage` for more information.
///
/// When this happens (i.e. the global state gets the garbage), it returns `true`. Otherwise,
/// it returns `false`.
fn add_garbage(&mut self, garbage: Garbage) -> bool {
// Push the garbage to the cache of garbage.
self.garbage.push(garbage);
// Export the garbage if it exceeds the limit.
// TODO: use memory instead of items as a metric.
if self.garbage.len() > settings::get().max_garbage_before_export {
self.export_garbage();
true
} else { false }
}
/// See `export_garbage()` for more information.
fn export_garbage(&mut self) {
// Print message in debug mode.
debug::exec(|| println!("Exporting garbage."));
// Clear the vector and export the garbage.
global::export_garbage(mem::replace(&mut self.garbage, Vec::new()));
}
}
impl Drop for State {
fn drop(&mut self) {
// Clear every hazard to "dead" state.
for hazard in self.available_hazards.drain(..) {
hazard.kill();
}
// The thread is exiting, thus we must export the garbage to the global state to avoid
// memory leaks. It is very important that this does indeed not tick, as causing garbage
// collection means accessing RNG state, a TLS variable, which cannot be done when we are
// here, after it has deinitialized.
// TODO: Figure out a way we can tick anyway.
self.export_garbage();
}
}
#[cfg(test)]
mod tests {
use super::*;
use garbage::Garbage;
use hazard;
use std::thread;
#[test]
fn dtor_runs() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn dtor_runs_cross_thread() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let bptr = &*b as *const _ as usize;
let h = thread::spawn(move || {
let h = get_hazard();
h.protect(bptr as *const u8);
h
}).join().unwrap();
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn clear_hazards() {
let mut s = State::default();
let mut v = Vec::new();
for _ in 0..100 {
let (w, r) = hazard::create();
w.protect(0x1 as *const u8);
v.push(r);
s.free_hazard(w);
}
for i in &v[0..16] {
assert_eq!(i.get(), hazard::State::Free);
}
mem::forget(v);
}
#[test]
fn kill_hazards() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = thread::spawn(move || {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
b
}).join().unwrap();
::gc();
assert_eq!(*b, 1);
}
}
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn debug_free_blocked() {
use std::mem;
let (writer, reader) = hazard::create();
mem::forget(reader);
free_hazard(writer);
}
}
|
State
|
identifier_name
|
local.rs
|
//! The thread-local state.
use std::{mem, thread};
use std::cell::RefCell;
use {global, hazard, guard, debug, settings};
use garbage::Garbage;
thread_local! {
/// The state of this thread.
static STATE: RefCell<State> = RefCell::new(State::default());
}
/// Add new garbage to be deleted.
///
/// This garbage is pushed to a thread-local queue. When enough garbage is accumulated in the
/// thread, it is exported to the global state.
pub fn add_garbage(garbage: Garbage) {
// Print message in debug mode.
debug::exec(|| println!("Adding garbage: {:?}", garbage));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for queueing garbage.
global::export_garbage(vec![garbage]);
} else {
// Add the garbage.
if STATE.with(|s| s.borrow_mut().add_garbage(garbage)) {
// The local state exported garbage to the global state, hence we must tick in order to
// ensure that the garbage is periodically collected.
global::tick();
}
}
}
/// Get a blocked hazard.
///
/// If possible, this will simply pop one from the thread-local cache of hazards. Otherwise, one must
/// be registered in the global state.
///
/// # Fence
///
/// This does not fence, and you must thus be careful with updating the value afterwards, as
/// reordering can happen, meaning that the hazard has not been blocked yet.
pub fn get_hazard() -> hazard::Writer {
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for creating new
// hazards.
global::create_hazard()
} else {
STATE.with(|s| s.borrow_mut().get_hazard())
}
}
/// Free a hazard.
///
/// This frees a hazard to the thread-local cache of hazards.
///
/// It is important that the hazard is **not** in blocked state, as such a thing can cause infinite
/// looping.
///
/// # Panics
///
/// This might panic in debug mode if the given hazard is in blocked state, as such a thing can cause
/// an infinite garbage collection cycle, or if the hazard is in dead state, as that means that it may
/// not be reusable (it could be destroyed).
pub fn free_hazard(hazard: hazard::Writer) {
// Print message in debug mode.
debug::exec(|| println!("Freeing hazard: {:?}", hazard));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
debug_assert!(!hazard.is_blocked(), "Illegally freeing a blocked hazards.");
if STATE.state() == thread::LocalKeyState::Destroyed {
// Since the state was deinitialized, we cannot store it for later reuse, so we are forced
// to simply kill the hazard.
hazard.kill();
} else {
STATE.with(|s| s.borrow_mut().free_hazard(hazard));
}
}
/// Export the garbage of this thread to the global state.
///
/// This is useful for propagating accumulated garbage such that it can be destroyed by the next
/// garbage collection.
pub fn export_garbage() {
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
// We can only export when the TLS variable isn't destroyed. Otherwise, there would be nothing
// to export!
if STATE.state() != thread::LocalKeyState::Destroyed {
STATE.with(|s| s.borrow_mut().export_garbage());
// We tick after the state is no longer reserved, as the tick could potentially call
// destructors that access the TLS variable.
global::tick();
}
}
/// A thread-local state.
#[derive(Default)]
struct State {
/// The cached garbage waiting to be exported to the global state.
garbage: Vec<Garbage>,
/// The cache of currently available hazards.
///
/// We maintain this cache to avoid the performance hit of creating new hazards.
///
/// The hazards in this vector are not necessarily in state "free". Only when a sufficient
/// amount of available hazards has accumulated, they will be set to free. This means that we
/// don't have to reset the state of a hazard after usage, giving a quite significant speed-up.
available_hazards: Vec<hazard::Writer>,
/// The hazards in the cache before this index are free.
///
/// This number keeps track of which hazards in `self.available_hazards` are set to state "free".
/// Before this index, every hazard must be set to "free".
///
/// It is useful for knowing when to free the hazards to allow garbage collection.
available_hazards_free_before: usize,
}
impl State {
/// Get the number of hazards in the cache which are not in state "free".
fn non_free_hazards(&self) -> usize {
self.available_hazards.len() - self.available_hazards_free_before
}
/// See `get_hazard()`.
fn get_hazard(&mut self) -> hazard::Writer {
// Check if there are hazards in the cache.
if let Some(hazard) = self.available_hazards.pop() {
// There is; we don't need to create a new hazard.
// Since the hazard popped from the cache is not blocked, we must block the hazard to
// satisfy the requirements of this function.
hazard.block();
hazard
} else {
// There is not; we must create a new hazard.
global::create_hazard()
}
}
/// See `free_hazard()`.
fn free_hazard(&mut self, hazard: hazard::Writer) {
// FIXME: This can lead to some subtle bugs, since the dtor is unpredictable as there is no
// way of predicting when the hazard is cleared.
// Push the given hazard to the cache.
self.available_hazards.push(hazard);
// Check if we exceeded the limit.
if self.non_free_hazards() > settings::get().max_non_free_hazards {
// We did; we must now set the non-free hazards to "free".
for i in &self.available_hazards[self.available_hazards_free_before..] {
i.free();
}
// Update the counter such that we mark the new hazards set to "free".
self.available_hazards_free_before = self.available_hazards.len();
}
}
/// Queues garbage to destroy.
///
/// Eventually the added garbage will be exported to the global state through
/// `global::add_garbage()`.
///
/// See `add_garbage` for more information.
///
/// When this happens (i.e. the global state gets the garbage), it returns `true`. Otherwise,
/// it returns `false`.
fn add_garbage(&mut self, garbage: Garbage) -> bool {
// Push the garbage to the cache of garbage.
self.garbage.push(garbage);
// Export the garbage if it exceeds the limit.
// TODO: use memory instead of items as a metric.
if self.garbage.len() > settings::get().max_garbage_before_export {
self.export_garbage();
true
} else
|
}
/// See `export_garbage()` for more information.
fn export_garbage(&mut self) {
// Print message in debug mode.
debug::exec(|| println!("Exporting garbage."));
// Clear the vector and export the garbage.
global::export_garbage(mem::replace(&mut self.garbage, Vec::new()));
}
}
impl Drop for State {
fn drop(&mut self) {
// Clear every hazard to "dead" state.
for hazard in self.available_hazards.drain(..) {
hazard.kill();
}
// The thread is exiting, thus we must export the garbage to the global state to avoid
// memory leaks. It is very important that this does indeed not tick, as causing garbage
// collection means accessing RNG state, a TLS variable, which cannot be done when we are
// here, after it has deinitialized.
// TODO: Figure out a way we can tick anyway.
self.export_garbage();
}
}
#[cfg(test)]
mod tests {
use super::*;
use garbage::Garbage;
use hazard;
use std::thread;
#[test]
fn dtor_runs() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn dtor_runs_cross_thread() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let bptr = &*b as *const _ as usize;
let h = thread::spawn(move || {
let h = get_hazard();
h.protect(bptr as *const u8);
h
}).join().unwrap();
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn clear_hazards() {
let mut s = State::default();
let mut v = Vec::new();
for _ in 0..100 {
let (w, r) = hazard::create();
w.protect(0x1 as *const u8);
v.push(r);
s.free_hazard(w);
}
for i in &v[0..16] {
assert_eq!(i.get(), hazard::State::Free);
}
mem::forget(v);
}
#[test]
fn kill_hazards() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = thread::spawn(move || {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
b
}).join().unwrap();
::gc();
assert_eq!(*b, 1);
}
}
#[cfg(debug_assertions)]
#[test]
#[should_panic]
fn debug_free_blocked() {
use std::mem;
let (writer, reader) = hazard::create();
mem::forget(reader);
free_hazard(writer);
}
}
|
{ false }
|
conditional_block
|
local.rs
|
//! The thread-local state.
use std::{mem, thread};
use std::cell::RefCell;
use {global, hazard, guard, debug, settings};
use garbage::Garbage;
thread_local! {
/// The state of this thread.
static STATE: RefCell<State> = RefCell::new(State::default());
}
/// Add new garbage to be deleted.
///
/// This garbage is pushed to a thread-local queue. When enough garbage is accumulated in the
/// thread, it is exported to the global state.
pub fn add_garbage(garbage: Garbage) {
// Print message in debug mode.
debug::exec(|| println!("Adding garbage: {:?}", garbage));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for queueing garbage.
global::export_garbage(vec![garbage]);
} else {
// Add the garbage.
if STATE.with(|s| s.borrow_mut().add_garbage(garbage)) {
// The local state exported garbage to the global state, hence we must tick in order to
// ensure that the garbage is periodically collected.
global::tick();
}
}
}
/// Get a blocked hazard.
///
/// If possible, this will simply pop one from the thread-local cache of hazards. Otherwise, one must
/// be registered in the global state.
///
/// # Fence
///
/// This does not fence, and you must thus be careful with updating the value afterwards, as
/// reordering can happen, meaning that the hazard has not been blocked yet.
pub fn get_hazard() -> hazard::Writer {
if STATE.state() == thread::LocalKeyState::Destroyed {
// The state was deinitialized, so we must rely on the global state for creating new
// hazards.
global::create_hazard()
} else {
STATE.with(|s| s.borrow_mut().get_hazard())
}
}
/// Free a hazard.
///
/// This frees a hazard to the thread-local cache of hazards.
///
/// It is important that the hazard is **not** in blocked state, as such a thing can cause infinite
/// looping.
///
/// # Panics
///
/// This might panic in debug mode if the given hazard is in blocked state, as such a thing can cause
/// an infinite garbage collection cycle, or if the hazard is in dead state, as that means that it may
/// not be reusable (it could be destroyed).
pub fn free_hazard(hazard: hazard::Writer) {
// Print message in debug mode.
debug::exec(|| println!("Freeing hazard: {:?}", hazard));
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
debug_assert!(!hazard.is_blocked(), "Illegally freeing a blocked hazards.");
if STATE.state() == thread::LocalKeyState::Destroyed {
// Since the state was deinitialized, we cannot store it for later reuse, so we are forced
// to simply kill the hazard.
hazard.kill();
} else {
STATE.with(|s| s.borrow_mut().free_hazard(hazard));
}
}
/// Export the garbage of this thread to the global state.
///
/// This is useful for propagating accumulated garbage such that it can be destroyed by the next
/// garbage collection.
pub fn export_garbage() {
// Since this function can trigger a GC, it must not be called inside a guard constructor.
guard::debug_assert_no_create();
// We can only export when the TLS variable isn't destroyed. Otherwise, there would be nothing
// to export!
if STATE.state() != thread::LocalKeyState::Destroyed {
STATE.with(|s| s.borrow_mut().export_garbage());
// We tick after the state is no longer reserved, as the tick could potentially call
// destructors that access the TLS variable.
global::tick();
}
}
/// A thread-local state.
#[derive(Default)]
struct State {
/// The cached garbage waiting to be exported to the global state.
garbage: Vec<Garbage>,
/// The cache of currently available hazards.
///
/// We maintain this cache to avoid the performance hit of creating new hazards.
///
/// The hazards in this vector are not necessarily in state "free". Only when a sufficient
/// number of available hazards has accumulated will they be set to free. This means that we
/// don't have to reset the state of a hazard after usage, giving a quite significant speed-up.
available_hazards: Vec<hazard::Writer>,
/// The hazards in the cache before this index are free.
///
/// This number keeps track of which hazards in `self.available_hazards` are set to state "free".
/// Before this index, every hazard must be set to "free".
///
/// It is useful for knowing when to free the hazards to allow garbage collection.
available_hazards_free_before: usize,
}
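// Illustrative note (added, not part of the original docs): with
// `available_hazards = [h0, h1, h2, h3]` and `available_hazards_free_before = 2`,
// hazards h0 and h1 have already been set to the "free" state, while h2 and h3
// still carry whatever value they last protected; they are only set to "free"
// once `free_hazard` sees more than `max_non_free_hazards` non-free entries.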
impl State {
/// Get the number of hazards in the cache which are not in state "free".
fn non_free_hazards(&self) -> usize {
self.available_hazards.len() - self.available_hazards_free_before
}
/// See `get_hazard()`.
fn get_hazard(&mut self) -> hazard::Writer {
// Check if there are hazards in the cache.
if let Some(hazard) = self.available_hazards.pop() {
// There is; we don't need to create a new hazard.
// Since the hazard popped from the cache is not blocked, we must block the hazard to
// satisfy the requirements of this function.
hazard.block();
hazard
} else {
// There is not; we must create a new hazard.
global::create_hazard()
}
}
/// See `free_hazard()`.
fn free_hazard(&mut self, hazard: hazard::Writer) {
// FIXME: This can lead to some subtle bugs, since there is no way of predicting when the
// hazard is cleared, which makes the timing of the dtor unpredictable.
// Push the given hazard to the cache.
self.available_hazards.push(hazard);
// Check if we exceeded the limit.
if self.non_free_hazards() > settings::get().max_non_free_hazards {
// We did; we must now set the non-free hazards to "free".
for i in &self.available_hazards[self.available_hazards_free_before..] {
i.free();
}
// Update the counter such that we mark the new hazards set to "free".
self.available_hazards_free_before = self.available_hazards.len();
}
}
/// Queues garbage to destroy.
///
/// Eventually the added garbage will be exported to the global state through
/// `global::add_garbage()`.
///
/// See `add_garbage` for more information.
///
/// When this happens (i.e. the global state gets the garbage), it returns `true`. Otherwise,
/// it returns `false`.
fn add_garbage(&mut self, garbage: Garbage) -> bool {
// Push the garbage to the cache of garbage.
self.garbage.push(garbage);
// Export the garbage if it exceeds the limit.
// TODO: use memory instead of items as a metric.
if self.garbage.len() > settings::get().max_garbage_before_export {
self.export_garbage();
true
} else { false }
}
/// See `export_garbage()` for more information.
fn export_garbage(&mut self) {
// Print message in debug mode.
debug::exec(|| println!("Exporting garbage."));
// Clear the vector and export the garbage.
global::export_garbage(mem::replace(&mut self.garbage, Vec::new()));
}
}
impl Drop for State {
fn drop(&mut self) {
// Clear every hazard to "dead" state.
for hazard in self.available_hazards.drain(..) {
hazard.kill();
}
// The thread is exiting, thus we must export the garbage to the global state to avoid
// memory leaks. It is very important that this does not tick, as causing garbage
// collection means accessing the RNG state, a TLS variable, which cannot be done here,
// after it has been deinitialized.
// TODO: Figure out a way we can tick anyway.
self.export_garbage();
}
}
#[cfg(test)]
mod tests {
use super::*;
use garbage::Garbage;
use hazard;
use std::thread;
#[test]
fn dtor_runs() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn dtor_runs_cross_thread() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = Box::new(0);
let bptr = &*b as *const _ as usize;
let h = thread::spawn(move || {
let h = get_hazard();
h.protect(bptr as *const u8);
h
}).join().unwrap();
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
::gc();
h.free();
::gc();
assert_eq!(*b, 1);
}
}
#[test]
fn clear_hazards() {
let mut s = State::default();
let mut v = Vec::new();
for _ in 0..100 {
let (w, r) = hazard::create();
w.protect(0x1 as *const u8);
v.push(r);
s.free_hazard(w);
}
for i in &v[0..16] {
assert_eq!(i.get(), hazard::State::Free);
}
mem::forget(v);
}
#[test]
fn kill_hazards() {
fn dtor(x: *const u8) {
unsafe {
*(x as *mut u8) = 1;
}
}
for _ in 0..1000 {
let b = thread::spawn(move || {
let b = Box::new(0);
let h = get_hazard();
h.protect(&*b);
add_garbage(Garbage::new(&*b, dtor));
::gc();
assert_eq!(*b, 0);
b
}).join().unwrap();
::gc();
assert_eq!(*b, 1);
}
}
|
fn debug_free_blocked() {
use std::mem;
let (writer, reader) = hazard::create();
mem::forget(reader);
free_hazard(writer);
}
}
|
#[cfg(debug_assertions)]
#[test]
#[should_panic]
|
random_line_split
|
no_0094_binary_tree_inorder_traversal.rs
|
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
pub val: i32,
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
struct Solution;
impl Solution {
pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
if root.is_none() {
return Vec::new();
}
|
stack.push(root.unwrap());
while let Some(node) = stack.pop() {
let left_node = node.borrow_mut().left.take();
if let Some(left) = left_node {
// There is a left subtree: push both the node and its left subtree.
stack.push(node);
stack.push(left);
continue;
}
// Visit the current (middle) node.
ans.push(node.borrow().val);
let right_node = node.borrow_mut().right.take();
if let Some(right) = right_node {
// Only a right child can remain here; push it onto the stack.
stack.push(right);
}
}
ans
}
}
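// Added note: this is an iterative inorder traversal. Each `take()` detaches a
// child link before it is followed, so every node is pushed onto the explicit
// stack at most twice; the walk runs in O(n) time with O(h) extra space, where
// h is the height of the tree.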
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_inorder_traversal() {
let root = node(TreeNode {
val: 1,
left: None,
right: node(TreeNode {
val: 2,
left: node(TreeNode::new(3)),
right: None,
}),
});
assert_eq!(Solution::inorder_traversal(root), vec![1, 3, 2]);
}
fn node(n: TreeNode) -> Option<Rc<RefCell<TreeNode>>> {
Some(Rc::new(RefCell::new(n)))
}
}
|
let mut ans = Vec::new();
let mut stack = Vec::new();
|
random_line_split
|
no_0094_binary_tree_inorder_traversal.rs
|
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
pub val: i32,
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
struct Solution;
impl Solution {
pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
if root.is_none() {
return Vec::new();
}
let mut ans = Vec::new();
let mut stack = Vec::new();
stack.push(root.unwrap());
while let Some(node) = stack.pop() {
let left_node = node.borrow_mut().left.take();
if let Some(left) = left_node {
// There is a left subtree: push both the node and its left subtree.
stack.push(node);
stack.push(left);
continue;
}
// Visit the current (middle) node.
ans.push(node.borrow().val);
let right_node = node.borrow_mut().right.take();
if let Some(right) = right_node {
// Only a right child can remain here; push it onto the stack.
stack.push(right);
}
}
ans
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_inorder_traversal() {
let root = node(TreeNode {
val: 1,
|
Some(Rc::new(RefCell::new(n)))
}
}
|
left: None,
right: node(TreeNode {
val: 2,
left: node(TreeNode::new(3)),
right: None,
}),
});
assert_eq!(Solution::inorder_traversal(root), vec![1, 3, 2]);
}
fn node(n: TreeNode) -> Option<Rc<RefCell<TreeNode>>> {
|
identifier_body
|
no_0094_binary_tree_inorder_traversal.rs
|
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
pub val: i32,
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
struct Solution;
impl Solution {
pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
if root.is_none()
|
let mut ans = Vec::new();
let mut stack = Vec::new();
stack.push(root.unwrap());
while let Some(node) = stack.pop() {
let left_node = node.borrow_mut().left.take();
if let Some(left) = left_node {
// There is a left subtree: push both the node and its left subtree.
stack.push(node);
stack.push(left);
continue;
}
// Visit the current (middle) node.
ans.push(node.borrow().val);
let right_node = node.borrow_mut().right.take();
if let Some(right) = right_node {
// Only a right child can remain here; push it onto the stack.
stack.push(right);
}
}
ans
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_inorder_traversal() {
let root = node(TreeNode {
val: 1,
left: None,
right: node(TreeNode {
val: 2,
left: node(TreeNode::new(3)),
right: None,
}),
});
assert_eq!(Solution::inorder_traversal(root), vec![1, 3, 2]);
}
fn node(n: TreeNode) -> Option<Rc<RefCell<TreeNode>>> {
Some(Rc::new(RefCell::new(n)))
}
}
|
{
return Vec::new();
}
|
conditional_block
|
no_0094_binary_tree_inorder_traversal.rs
|
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
pub val: i32,
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
struct Solution;
impl Solution {
pub fn
|
(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
if root.is_none() {
return Vec::new();
}
let mut ans = Vec::new();
let mut stack = Vec::new();
stack.push(root.unwrap());
while let Some(node) = stack.pop() {
let left_node = node.borrow_mut().left.take();
if let Some(left) = left_node {
// There is a left subtree: push both the node and its left subtree.
stack.push(node);
stack.push(left);
continue;
}
// Visit the current (middle) node.
ans.push(node.borrow().val);
let right_node = node.borrow_mut().right.take();
if let Some(right) = right_node {
// Only a right child can remain here; push it onto the stack.
stack.push(right);
}
}
ans
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_inorder_traversal() {
let root = node(TreeNode {
val: 1,
left: None,
right: node(TreeNode {
val: 2,
left: node(TreeNode::new(3)),
right: None,
}),
});
assert_eq!(Solution::inorder_traversal(root), vec![1, 3, 2]);
}
fn node(n: TreeNode) -> Option<Rc<RefCell<TreeNode>>> {
Some(Rc::new(RefCell::new(n)))
}
}
|
inorder_traversal
|
identifier_name
|
class-typarams.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker::PhantomData;
struct cat<U> {
meows : uint,
how_hungry : int,
m: PhantomData<U>
}
impl<U> cat<U> {
pub fn speak(&mut self)
|
pub fn meow_count(&mut self) -> uint { self.meows }
}
fn cat<U>(in_x : uint, in_y : int) -> cat<U> {
cat {
meows: in_x,
how_hungry: in_y,
m: PhantomData
}
}
pub fn main() {
let _nyan : cat<int> = cat::<int>(52, 99);
// let mut kitty = cat(1000, 2);
}
|
{ self.meows += 1; }
|
identifier_body
|
class-typarams.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker::PhantomData;
struct cat<U> {
meows : uint,
how_hungry : int,
m: PhantomData<U>
}
|
pub fn meow_count(&mut self) -> uint { self.meows }
}
fn cat<U>(in_x : uint, in_y : int) -> cat<U> {
cat {
meows: in_x,
how_hungry: in_y,
m: PhantomData
}
}
pub fn main() {
let _nyan : cat<int> = cat::<int>(52, 99);
// let mut kitty = cat(1000, 2);
}
|
impl<U> cat<U> {
pub fn speak(&mut self) { self.meows += 1; }
|
random_line_split
|
class-typarams.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::marker::PhantomData;
struct cat<U> {
meows : uint,
how_hungry : int,
m: PhantomData<U>
}
impl<U> cat<U> {
pub fn speak(&mut self) { self.meows += 1; }
pub fn meow_count(&mut self) -> uint { self.meows }
}
fn
|
<U>(in_x : uint, in_y : int) -> cat<U> {
cat {
meows: in_x,
how_hungry: in_y,
m: PhantomData
}
}
pub fn main() {
let _nyan : cat<int> = cat::<int>(52, 99);
// let mut kitty = cat(1000, 2);
}
|
cat
|
identifier_name
|
main.rs
|
/*!
This crate implements a simple arithmetic calculator, using the [`rust-scan`](https://github.com/DanielKeep/rust-scan.git) package to do the parsing.
*/
#![feature(phase)]
#[phase(plugin)] extern crate scan;
extern crate scan_util;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::num::Float;
use scan_util::{OtherScanError, ScanIoError};
use self::Stmt::{LetStmt, ExprStmt};
use self::Expr::{Add, Sub, Mul, Div, Var, Lit};
fn main() {
let mut vars = HashMap::new();
vars.insert("NaN".to_owned(), Float::nan());
vars.insert("inf".to_owned(), Float::infinity());
vars.insert("pi".to_owned(), ::std::f64::consts::PI);
vars.insert("e".to_owned(), ::std::f64::consts::E);
println!("Type `.help` or `.h` for brief instructions.");
println!("Type `.quit` or `.q` to quit.");
loop {
print!("> ");
let res = scanln! {
// Show brief help message.
".h" | ".help" => {
println!("Type expressions to be evaluated.");
println!("You can use +, -, *, /.");
println!("You can define variables like so: `let pi = 3.14159`.");
println!("Type.q or.quit to quit.");
continue;
},
// Exit.
".q" | ".quit" => {
println!("Bye!");
return;
},
// Fallback for unknown commands.
"." cmd:&str,.._ => {
println!("error: unknown command `{}`", cmd);
println!("Try `.help`.");
continue;
},
// Ignore empty lines.
_:() => continue,
// Otherwise, try to scan a statement.
stmt:Stmt => stmt,
};
let stmt = match res {
Err(ScanIoError(err)) => {
println!("io error: {}", err);
std::os::set_exit_status(1);
return;
},
Err(err @ OtherScanError(..)) => {
println!("error: {}", err);
continue;
},
Ok(stmt) => stmt,
};
match stmt {
LetStmt(name, expr) => {
let vars = &mut vars;
eval(expr, vars).map(|v| vars.insert(name.clone(), v));
},
ExprStmt(expr) => {
eval(expr, &vars).map(|v| println!("= {}", v));
},
}
}
}
/**
Evaluates an expression, returning `Some(f64)` if there were no errors.
If something *does* go wrong, it will just print the error message and return `None`. This really should be using `Result`, but I'm too lazy for that.
*/
fn eval(expr: Expr, vars: &HashMap<String, f64>) -> Option<f64> {
match expr {
Add(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l+r),
Sub(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l-r),
Mul(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l*r),
Div(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l/r),
Var(name) => {
match vars.get(&name).cloned() {
Some(v) => Some(v),
None => {
println!("error: undefined variable `{}`.", name);
None
}
}
},
Lit(v) => Some(v),
}
}
/*
The trick to using scan here is to map each grammar production to a type, then implement a scanner for that type. As results are passed back up the call stack, we "unpack" the grammar production type into the actual semantic type.
This is actually something of a limitation of the `Scanner` trait; ideally, we could use UFCS (which isn't implemented yet) to have a capture like `lhs:MulExpr` which uses the scanner code defined for the `MulExpr` type, but which *results* in an `Expr` value.
The grammar we're parsing is given below. Note that it's arranged this way to encode operator precedence directly into the grammar.
```{notrust}
<expr> := <add-expr>
<add-expr> := <mul-expr> ( ( "+" | "-" ) <add-expr> )?
<mul-expr> := <atom-expr> ( ( "*" | "/" ) <mul-expr> )?
<atom-expr> := "(" <add-expr> ")"
| <literal>
| <identifier>
```
*/
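// Worked example (added for illustration): under this grammar, `1 + 2 * 3` is
// scanned (schematically) as Add(Lit(1.0), Mul(Lit(2.0), Lit(3.0))), because the
// <mul-expr> production is consumed before the surrounding <add-expr> continues,
// so `eval` returns 7 rather than 9.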
#[derive(PartialEq, Show)]
enum Stmt {
LetStmt(String, Expr),
ExprStmt(Expr),
}
scanner! { Stmt,
// We need to use a different tokeniser here to ensure that `name` can be any identifier.
#[tokenizer="IdentsAndInts"]
"let" name "=" expr => LetStmt(name, expr),
expr => ExprStmt(expr),
}
#[derive(PartialEq, Show)]
enum Expr {
Add(Box<Expr>, Box<Expr>),
Sub(Box<Expr>, Box<Expr>),
Mul(Box<Expr>, Box<Expr>),
Div(Box<Expr>, Box<Expr>),
Var(String),
Lit(f64),
}
scanner! { Expr,
// Just forward on to the corresponding grammar production: AddExpr.
expr:AddExpr => {
let AddExpr(expr) = expr;
expr
}
}
struct
|
(Expr);
scanner! { AddExpr,
// We *could* boil this down to a single arm, but it wouldn't be as neat.
lhs:MulExpr "+" rhs:AddExpr => {
// Again, we only need MulExpr and AddExpr in order to trigger the correct scanning rules. What we actually *care* about are the Exprs inside.
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Add(box lhs, box rhs))
},
lhs:MulExpr "-" rhs:AddExpr => {
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Sub(box lhs, box rhs))
},
lhs:MulExpr => {
let MulExpr(lhs) = lhs;
AddExpr(lhs)
}
}
struct MulExpr(Expr);
scanner! { MulExpr,
lhs:AtomExpr "*" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Mul(box lhs, box rhs))
},
lhs:AtomExpr "/" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Div(box lhs, box rhs))
},
lhs:AtomExpr => {
let AtomExpr(lhs) = lhs;
MulExpr(lhs)
}
}
struct AtomExpr(Expr);
scanner! { AtomExpr,
"(" expr:AddExpr ")" => {
let AddExpr(expr) = expr;
AtomExpr(expr)
},
lit => AtomExpr(Lit(lit)),
// Again, we use a different tokeniser. Also note that it's not *entirely* correct. This technically means we can have a variable called `*`, which means `***` is a perfectly valid expression.
#[tokenizer="IdentsAndInts"]
name => AtomExpr(Var(name)),
}
/**
This is just a helper trait used to simplify evaluating binary expressions.
*/
trait ZipOption<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)>;
}
impl<L> ZipOption<L> for Option<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)> {
match (self, other) {
(None, _) => None,
(Some(_), None) => None,
(Some(l), Some(r)) => Some((l, r))
}
}
}
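// Added usage note: `Some(1.0f64).zip(Some(2.0)) == Some((1.0, 2.0))`, while a
// `None` on either side yields `None`; `eval` relies on this to short-circuit
// whenever either operand of a binary expression failed to evaluate.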
|
AddExpr
|
identifier_name
|
main.rs
|
/*!
This crate implements a simple arithmetic calculator, using the [`rust-scan`](https://github.com/DanielKeep/rust-scan.git) package to do the parsing.
*/
#![feature(phase)]
#[phase(plugin)] extern crate scan;
extern crate scan_util;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::num::Float;
use scan_util::{OtherScanError, ScanIoError};
use self::Stmt::{LetStmt, ExprStmt};
use self::Expr::{Add, Sub, Mul, Div, Var, Lit};
fn main()
|
continue;
},
// Exit.
".q" | ".quit" => {
println!("Bye!");
return;
},
// Fallback for unknown commands.
"." cmd:&str,.._ => {
println!("error: unknown command `{}`", cmd);
println!("Try `.help`.");
continue;
},
// Ignore empty lines.
_:() => continue,
// Otherwise, try to scan a statement.
stmt:Stmt => stmt,
};
let stmt = match res {
Err(ScanIoError(err)) => {
println!("io error: {}", err);
std::os::set_exit_status(1);
return;
},
Err(err @ OtherScanError(..)) => {
println!("error: {}", err);
continue;
},
Ok(stmt) => stmt,
};
match stmt {
LetStmt(name, expr) => {
let vars = &mut vars;
eval(expr, vars).map(|v| vars.insert(name.clone(), v));
},
ExprStmt(expr) => {
eval(expr, &vars).map(|v| println!("= {}", v));
},
}
}
}
/**
Evaluates an expression, returning `Some(f64)` if there were no errors.
If something *does* go wrong, it will just print the error message and return `None`. This really should be using `Result`, but I'm too lazy for that.
*/
fn eval(expr: Expr, vars: &HashMap<String, f64>) -> Option<f64> {
match expr {
Add(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l+r),
Sub(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l-r),
Mul(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l*r),
Div(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l/r),
Var(name) => {
match vars.get(&name).cloned() {
Some(v) => Some(v),
None => {
println!("error: undefined variable `{}`.", name);
None
}
}
},
Lit(v) => Some(v),
}
}
/*
The trick to using scan here is to map each grammar production to a type, then implement a scanner for that type. As results are passed back up the call stack, we "unpack" the grammar production type into the actual semantic type.
This is actually something of a limitation of the `Scanner` trait; ideally, we could use UFCS (which isn't implemented yet) to have a capture like `lhs:MulExpr` which uses the scanner code defined for the `MulExpr` type, but which *results* in an `Expr` value.
The grammar we're parsing is given below. Note that it's arranged this way to encode operator precedence directly into the grammar.
```{notrust}
<expr> := <add-expr>
<add-expr> := <mul-expr> ( ( "+" | "-" ) <add-expr> )?
<mul-expr> := <atom-expr> ( ( "*" | "/" ) <mul-expr> )?
<atom-expr> := "(" <add-expr> ")"
| <literal>
| <identifier>
```
*/
#[derive(PartialEq, Show)]
enum Stmt {
LetStmt(String, Expr),
ExprStmt(Expr),
}
scanner! { Stmt,
// We need to use a different tokeniser here to ensure that `name` can be any identifier.
#[tokenizer="IdentsAndInts"]
"let" name "=" expr => LetStmt(name, expr),
expr => ExprStmt(expr),
}
#[derive(PartialEq, Show)]
enum Expr {
Add(Box<Expr>, Box<Expr>),
Sub(Box<Expr>, Box<Expr>),
Mul(Box<Expr>, Box<Expr>),
Div(Box<Expr>, Box<Expr>),
Var(String),
Lit(f64),
}
scanner! { Expr,
// Just forward on to the corresponding grammar production: AddExpr.
expr:AddExpr => {
let AddExpr(expr) = expr;
expr
}
}
struct AddExpr(Expr);
scanner! { AddExpr,
// We *could* boil this down to a single arm, but it wouldn't be as neat.
lhs:MulExpr "+" rhs:AddExpr => {
// Again, we only need MulExpr and AddExpr in order to trigger the correct scanning rules. What we actually *care* about are the Exprs inside.
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Add(box lhs, box rhs))
},
lhs:MulExpr "-" rhs:AddExpr => {
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Sub(box lhs, box rhs))
},
lhs:MulExpr => {
let MulExpr(lhs) = lhs;
AddExpr(lhs)
}
}
struct MulExpr(Expr);
scanner! { MulExpr,
lhs:AtomExpr "*" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Mul(box lhs, box rhs))
},
lhs:AtomExpr "/" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Div(box lhs, box rhs))
},
lhs:AtomExpr => {
let AtomExpr(lhs) = lhs;
MulExpr(lhs)
}
}
struct AtomExpr(Expr);
scanner! { AtomExpr,
"(" expr:AddExpr ")" => {
let AddExpr(expr) = expr;
AtomExpr(expr)
},
lit => AtomExpr(Lit(lit)),
// Again, we use a different tokeniser. Also note that it's not *entirely* correct. This technically means we can have a variable called `*`, which means `***` is a perfectly valid expression.
#[tokenizer="IdentsAndInts"]
name => AtomExpr(Var(name)),
}
/**
This is just a helper trait used to simplify evaluating binary expressions.
*/
trait ZipOption<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)>;
}
impl<L> ZipOption<L> for Option<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)> {
match (self, other) {
(None, _) => None,
(Some(_), None) => None,
(Some(l), Some(r)) => Some((l, r))
}
}
}
|
{
let mut vars = HashMap::new();
vars.insert("NaN".to_owned(), Float::nan());
vars.insert("inf".to_owned(), Float::infinity());
vars.insert("pi".to_owned(), ::std::f64::consts::PI);
vars.insert("e".to_owned(), ::std::f64::consts::E);
println!("Type `.help` or `.h` for brief instructions.");
println!("Type `.quit` or `.q` to quit.");
loop {
print!("> ");
let res = scanln! {
// Show brief help message.
".h" | ".help" => {
println!("Type expressions to be evaluated.");
println!("You can use +, -, *, /.");
println!("You can define variables like so: `let pi = 3.14159`.");
println!("Type .q or .quit to quit.");
|
identifier_body
|
main.rs
|
/*!
This crate implements a simple arithmetic calculator, using the [`rust-scan`](https://github.com/DanielKeep/rust-scan.git) package to do the parsing.
*/
#![feature(phase)]
#[phase(plugin)] extern crate scan;
extern crate scan_util;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::num::Float;
use scan_util::{OtherScanError, ScanIoError};
use self::Stmt::{LetStmt, ExprStmt};
use self::Expr::{Add, Sub, Mul, Div, Var, Lit};
fn main() {
let mut vars = HashMap::new();
vars.insert("NaN".to_owned(), Float::nan());
vars.insert("inf".to_owned(), Float::infinity());
vars.insert("pi".to_owned(), ::std::f64::consts::PI);
vars.insert("e".to_owned(), ::std::f64::consts::E);
println!("Type `.help` or `.h` for brief instructions.");
println!("Type `.quit` or `.q` to quit.");
loop {
print!("> ");
let res = scanln! {
// Show brief help message.
".h" | ".help" => {
println!("Type expressions to be evaluated.");
println!("You can use +, -, *, /.");
println!("You can define variables like so: `let pi = 3.14159`.");
println!("Type.q or.quit to quit.");
continue;
},
// Exit.
".q" | ".quit" => {
println!("Bye!");
return;
},
// Fallback for unknown commands.
"." cmd:&str,.._ => {
println!("error: unknown command `{}`", cmd);
println!("Try `.help`.");
continue;
},
// Ignore empty lines.
_:() => continue,
// Otherwise, try to scan a statement.
stmt:Stmt => stmt,
};
let stmt = match res {
Err(ScanIoError(err)) => {
println!("io error: {}", err);
std::os::set_exit_status(1);
return;
},
Err(err @ OtherScanError(..)) => {
println!("error: {}", err);
continue;
},
|
};
match stmt {
LetStmt(name, expr) => {
let vars = &mut vars;
eval(expr, vars).map(|v| vars.insert(name.clone(), v));
},
ExprStmt(expr) => {
eval(expr, &vars).map(|v| println!("= {}", v));
},
}
}
}
/**
Evaluates an expression, returning `Some(f64)` if there were no errors.
If something *does* go wrong, it will just print the error message and return `None`. This really should be using `Result`, but I'm too lazy for that.
*/
fn eval(expr: Expr, vars: &HashMap<String, f64>) -> Option<f64> {
match expr {
Add(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l+r),
Sub(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l-r),
Mul(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l*r),
Div(box lhs, box rhs) => eval(lhs, vars).zip(eval(rhs, vars)).map(|(l,r)| l/r),
Var(name) => {
match vars.get(&name).cloned() {
Some(v) => Some(v),
None => {
println!("error: undefined variable `{}`.", name);
None
}
}
},
Lit(v) => Some(v),
}
}
/*
The trick to using scan here is to map each grammar production to a type, then implement a scanner for that type. As results are passed back up the call stack, we "unpack" the grammar production type into the actual semantic type.
This is actually something of a limitation of the `Scanner` trait; ideally, we could use UFCS (which isn't implemented yet) to have a capture like `lhs:MulExpr` which uses the scanner code defined for the `MulExpr` type, but which *results* in an `Expr` value.
The grammar we're parsing is given below. Note that it's arranged this way to encode operator precedence directly into the grammar.
```{notrust}
<expr> := <add-expr>
<add-expr> := <mul-expr> ( ( "+" | "-" ) <add-expr> )?
<mul-expr> := <atom-expr> ( ( "*" | "/" ) <mul-expr> )?
<atom-expr> := "(" <add-expr> ")"
| <literal>
| <identifier>
```
*/
#[derive(PartialEq, Show)]
enum Stmt {
LetStmt(String, Expr),
ExprStmt(Expr),
}
scanner! { Stmt,
// We need to use a different tokeniser here to ensure that `name` can be any identifier.
#[tokenizer="IdentsAndInts"]
"let" name "=" expr => LetStmt(name, expr),
expr => ExprStmt(expr),
}
#[derive(PartialEq, Show)]
enum Expr {
Add(Box<Expr>, Box<Expr>),
Sub(Box<Expr>, Box<Expr>),
Mul(Box<Expr>, Box<Expr>),
Div(Box<Expr>, Box<Expr>),
Var(String),
Lit(f64),
}
scanner! { Expr,
// Just forward on to the corresponding grammar production: AddExpr.
expr:AddExpr => {
let AddExpr(expr) = expr;
expr
}
}
struct AddExpr(Expr);
scanner! { AddExpr,
// We *could* boil this down to a single arm, but it wouldn't be as neat.
lhs:MulExpr "+" rhs:AddExpr => {
// Again, we only need MulExpr and AddExpr in order to trigger the correct scanning rules. What we actually *care* about are the Exprs inside.
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Add(box lhs, box rhs))
},
lhs:MulExpr "-" rhs:AddExpr => {
let (MulExpr(lhs), AddExpr(rhs)) = (lhs, rhs);
AddExpr(Sub(box lhs, box rhs))
},
lhs:MulExpr => {
let MulExpr(lhs) = lhs;
AddExpr(lhs)
}
}
struct MulExpr(Expr);
scanner! { MulExpr,
lhs:AtomExpr "*" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Mul(box lhs, box rhs))
},
lhs:AtomExpr "/" rhs:MulExpr => {
let (AtomExpr(lhs), MulExpr(rhs)) = (lhs, rhs);
MulExpr(Div(box lhs, box rhs))
},
lhs:AtomExpr => {
let AtomExpr(lhs) = lhs;
MulExpr(lhs)
}
}
struct AtomExpr(Expr);
scanner! { AtomExpr,
"(" expr:AddExpr ")" => {
let AddExpr(expr) = expr;
AtomExpr(expr)
},
lit => AtomExpr(Lit(lit)),
// Again, we use a different tokeniser. Also note that it's not *entirely* correct. This technically means we can have a variable called `*`, which means `***` is a perfectly valid expression.
#[tokenizer="IdentsAndInts"]
name => AtomExpr(Var(name)),
}
/**
This is just a helper trait used to simplify evaluating binary expressions.
*/
trait ZipOption<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)>;
}
impl<L> ZipOption<L> for Option<L> {
fn zip<R>(self, other: Option<R>) -> Option<(L, R)> {
match (self, other) {
(None, _) => None,
(Some(_), None) => None,
(Some(l), Some(r)) => Some((l, r))
}
}
}
|
Ok(stmt) => stmt,
|
random_line_split
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use selectors::bloom::BloomHash;
use std::borrow::Borrow;
use std::ops::Deref;
use {Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::Namespace(atom!("")) }
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
pub struct
|
(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
impl BloomHash for Namespace {
#[inline]
fn bloom_hash(&self) -> u32 {
self.0.get_hash()
}
}
impl BloomHash for WeakNamespace {
#[inline]
fn bloom_hash(&self) -> u32 {
self.0.get_hash()
}
}
|
WeakNamespace
|
identifier_name
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use selectors::bloom::BloomHash;
use std::borrow::Borrow;
use std::ops::Deref;
use {Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::Namespace(atom!("")) }
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
pub struct WeakNamespace(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
impl BloomHash for Namespace {
#[inline]
fn bloom_hash(&self) -> u32 {
self.0.get_hash()
}
}
impl BloomHash for WeakNamespace {
#[inline]
fn bloom_hash(&self) -> u32
|
}
|
{
self.0.get_hash()
}
|
identifier_body
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use selectors::bloom::BloomHash;
use std::borrow::Borrow;
use std::ops::Deref;
use {Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::Namespace(atom!("")) }
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
pub struct WeakNamespace(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
|
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
impl BloomHash for Namespace {
#[inline]
fn bloom_hash(&self) -> u32 {
self.0.get_hash()
}
}
impl BloomHash for WeakNamespace {
#[inline]
fn bloom_hash(&self) -> u32 {
self.0.get_hash()
}
}
|
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
|
random_line_split
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
use std::kinds::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn new(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn new(x:int,y:int) -> DropNoFoo { DropNoFoo { inner: NoFoo::new(x,y) } }
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo { DropMoveFoo { inner: MoveFoo::new(x,y) } }
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3),..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4),..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7,..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8,..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
let b = MoveFoo {moved: box 13,..f};
let c = MoveFoo {copied: 14,..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23),..f};
let c = NoFoo {copied: 24,..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
|
pub fn main() {
test0();
test1();
test2();
}
|
assert_eq!(c.nocopy.v, 22);
}
|
random_line_split
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
use std::kinds::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn new(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn new(x:int,y:int) -> DropNoFoo
|
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo { DropMoveFoo { inner: MoveFoo::new(x,y) } }
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3),..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4),..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7,..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8,..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
let b = MoveFoo {moved: box 13,..f};
let c = MoveFoo {copied: 14,..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23),..f};
let c = NoFoo {copied: 24,..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
assert_eq!(c.nocopy.v, 22);
}
pub fn main() {
test0();
test1();
test2();
}
|
{ DropNoFoo { inner: NoFoo::new(x,y) } }
|
identifier_body
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
use std::kinds::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn new(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn
|
(x:int,y:int) -> DropNoFoo { DropNoFoo { inner: NoFoo::new(x,y) } }
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo { DropMoveFoo { inner: MoveFoo::new(x,y) } }
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3),..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4),..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7,..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8,..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
let b = MoveFoo {moved: box 13,..f};
let c = MoveFoo {copied: 14,..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23),..f};
let c = NoFoo {copied: 24,..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
assert_eq!(c.nocopy.v, 22);
}
pub fn main() {
test0();
test1();
test2();
}
|
new
|
identifier_name
|
slnfile.rs
|
use std::path::Path;
use std::fs::File;
|
use visualstudio::{ProjDesc, escape};
pub struct SlnFile {
projects: Vec<ProjDesc>,
}
impl SlnFile {
pub fn new() -> SlnFile {
SlnFile {
projects: Vec::new()
}
}
pub fn add_project(&mut self, proj: ProjDesc) {
self.projects.push(proj);
}
pub fn write_to<P: AsRef<Path>>(&self, path: P) {
let mut file = File::create(path).unwrap();
// Generic version metadata
writeln!(file, "Microsoft Visual Studio Solution File, Format Version 12.00").unwrap();
writeln!(file, "# Visual Studio 14").unwrap();
writeln!(file, "VisualStudioVersion = 14.0.25420.1").unwrap();
writeln!(file, "MinimumVisualStudioVersion = 10.0.40219.1").unwrap();
// Write all projects
for project in &self.projects {
writeln!(
file, // The hardcoded GUID here is the C++ project type
"Project(\"{}\") = \"{}\", \"{}\", \"{{{}}}\"",
"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
project.name, escape(format!("{}", project.vcxproj_path.display())), project.uuid.hyphenated()
).unwrap();
writeln!(file, "EndProject").unwrap();
}
}
}
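// Hypothetical usage sketch (added; `demo_proj` is an assumed ProjDesc value,
// not something defined in the code shown here):
//
//     let mut sln = SlnFile::new();
//     sln.add_project(demo_proj);
//     sln.write_to("generated/solution.sln");
//
// This writes the Visual Studio 14 (2015-era) solution header followed by one
// Project(...)/EndProject pair per registered project.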
|
use std::io::Write;
|
random_line_split
|
slnfile.rs
|
use std::path::Path;
use std::fs::File;
use std::io::Write;
use visualstudio::{ProjDesc, escape};
pub struct SlnFile {
projects: Vec<ProjDesc>,
}
impl SlnFile {
pub fn new() -> SlnFile
|
pub fn add_project(&mut self, proj: ProjDesc) {
self.projects.push(proj);
}
pub fn write_to<P: AsRef<Path>>(&self, path: P) {
let mut file = File::create(path).unwrap();
// Generic version metadata
writeln!(file, "Microsoft Visual Studio Solution File, Format Version 12.00").unwrap();
writeln!(file, "# Visual Studio 14").unwrap();
writeln!(file, "VisualStudioVersion = 14.0.25420.1").unwrap();
writeln!(file, "MinimumVisualStudioVersion = 10.0.40219.1").unwrap();
// Write all projects
for project in &self.projects {
writeln!(
file, // The hardcoded GUID here is the C++ project type
"Project(\"{}\") = \"{}\", \"{}\", \"{{{}}}\"",
"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
project.name, escape(format!("{}", project.vcxproj_path.display())), project.uuid.hyphenated()
).unwrap();
writeln!(file, "EndProject").unwrap();
}
}
}
|
{
SlnFile {
projects: Vec::new()
}
}
|
identifier_body
|
slnfile.rs
|
use std::path::Path;
use std::fs::File;
use std::io::Write;
use visualstudio::{ProjDesc, escape};
pub struct SlnFile {
projects: Vec<ProjDesc>,
}
impl SlnFile {
pub fn new() -> SlnFile {
SlnFile {
projects: Vec::new()
}
}
pub fn
|
(&mut self, proj: ProjDesc) {
self.projects.push(proj);
}
pub fn write_to<P: AsRef<Path>>(&self, path: P) {
let mut file = File::create(path).unwrap();
// Generic version metadata
writeln!(file, "Microsoft Visual Studio Solution File, Format Version 12.00").unwrap();
writeln!(file, "# Visual Studio 14").unwrap();
writeln!(file, "VisualStudioVersion = 14.0.25420.1").unwrap();
writeln!(file, "MinimumVisualStudioVersion = 10.0.40219.1").unwrap();
// Write all projects
for project in &self.projects {
writeln!(
file, // The hardcoded GUID here is the C++ project type
"Project(\"{}\") = \"{}\", \"{}\", \"{{{}}}\"",
"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
project.name, escape(format!("{}", project.vcxproj_path.display())), project.uuid.hyphenated()
).unwrap();
writeln!(file, "EndProject").unwrap();
}
}
}
|
add_project
|
identifier_name
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed values.
use app_units::Au;
use euclid::size::Size2D;
use font_metrics::FontMetricsProvider;
use media_queries::Device;
use properties::ComputedValues;
use std::fmt;
use style_traits::ToCss;
use super::{CSSFloat, RGBA, specified};
use super::specified::grid::{TrackBreadth as GenericTrackBreadth, TrackSize as GenericTrackSize};
pub use cssparser::Color as CSSColor;
pub use self::image::{AngleOrCorner, EndingShape as GradientShape, Gradient, GradientKind, Image};
pub use self::image::{LengthOrKeyword, LengthOrPercentageOrKeyword};
pub use super::{Auto, Either, None_};
#[cfg(feature = "gecko")]
pub use super::specified::{AlignItems, AlignJustifyContent, AlignJustifySelf, JustifyItems};
pub use super::specified::{Angle, BorderStyle, GridLine, Time, UrlOrNone};
pub use super::specified::url::{SpecifiedUrl, UrlExtraData};
pub use self::length::{CalcLengthOrPercentage, Length, LengthOrNumber, LengthOrPercentage, LengthOrPercentageOrAuto};
pub use self::length::{LengthOrPercentageOrAutoOrContent, LengthOrPercentageOrNone, LengthOrNone};
pub use self::length::{MaxLength, MinLength};
pub use self::position::Position;
pub mod basic_shape;
pub mod image;
pub mod length;
pub mod position;
/// A `Context` is all the data a specified value could ever need to compute
/// itself and be transformed to a computed value.
pub struct Context<'a> {
/// Whether the current element is the root element.
pub is_root_element: bool,
/// The Device holds the viewport and other external state.
pub device: &'a Device,
/// The style we're inheriting from.
pub inherited_style: &'a ComputedValues,
/// The style of the layout parent node. This will almost always be
/// `inherited_style`, except when `display: contents` is at play, in which
/// case it's the style of the last ancestor with a `display` value that
/// isn't `contents`.
pub layout_parent_style: &'a ComputedValues,
/// Values accessed through this need to be in the properties "computed
/// early": color, text-decoration, font-size, display, position, float,
/// border-*-style, outline-style, font-family, writing-mode...
pub style: ComputedValues,
/// A font metrics provider, used to access font metrics to implement
/// font-relative units.
///
/// TODO(emilio): This should be required, see #14079.
pub font_metrics_provider: Option<&'a FontMetricsProvider>,
}
impl<'a> Context<'a> {
/// Whether the current element is the root element.
pub fn is_root_element(&self) -> bool { self.is_root_element }
/// The current viewport size.
pub fn viewport_size(&self) -> Size2D<Au> { self.device.au_viewport_size() }
/// The style we're inheriting from.
pub fn inherited_style(&self) -> &ComputedValues { &self.inherited_style }
/// The current style. Note that only "eager" properties should be accessed
/// from here, see the comment in the member.
pub fn style(&self) -> &ComputedValues { &self.style }
/// A mutable reference to the current style.
|
/// A trait to represent the conversion between computed and specified values.
pub trait ToComputedValue {
/// The computed value type we're going to be converted to.
type ComputedValue;
/// Convert a specified value to a computed value, using itself and the data
/// inside the `Context`.
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue;
#[inline]
/// Convert a computed value to specified value form.
///
/// This will be used for recascading during animation.
/// Such from_computed_value values should recompute to the same value.
fn from_computed_value(computed: &Self::ComputedValue) -> Self;
}
/// A marker trait to represent that the specified value is also the computed
/// value.
pub trait ComputedValueAsSpecified {}
impl<T> ToComputedValue for T
where T: ComputedValueAsSpecified + Clone,
{
type ComputedValue = T;
#[inline]
fn to_computed_value(&self, _context: &Context) -> T {
self.clone()
}
#[inline]
fn from_computed_value(computed: &T) -> Self {
computed.clone()
}
}
impl ToComputedValue for specified::Color {
type ComputedValue = RGBA;
#[cfg(not(feature = "gecko"))]
fn to_computed_value(&self, context: &Context) -> RGBA {
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
}
}
#[cfg(feature = "gecko")]
fn to_computed_value(&self, context: &Context) -> RGBA {
use gecko::values::convert_nscolor_to_rgba as to_rgba;
// It's safe to access the nsPresContext immutably during style computation.
let pres_context = unsafe { &*context.device.pres_context };
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::System(system) => to_rgba(system.to_computed_value(context)),
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
specified::Color::MozDefaultColor => to_rgba(pres_context.mDefaultColor),
specified::Color::MozDefaultBackgroundColor => to_rgba(pres_context.mBackgroundColor),
specified::Color::MozHyperlinktext => to_rgba(pres_context.mLinkColor),
specified::Color::MozActiveHyperlinktext => to_rgba(pres_context.mActiveLinkColor),
specified::Color::MozVisitedHyperlinktext => to_rgba(pres_context.mVisitedLinkColor),
}
}
fn from_computed_value(computed: &RGBA) -> Self {
specified::Color::RGBA(*computed)
}
}
impl ToComputedValue for specified::CSSColor {
type ComputedValue = CSSColor;
#[cfg(not(feature = "gecko"))]
#[inline]
fn to_computed_value(&self, _context: &Context) -> CSSColor {
self.parsed
}
#[cfg(feature = "gecko")]
#[inline]
fn to_computed_value(&self, context: &Context) -> CSSColor {
match self.parsed {
specified::Color::RGBA(rgba) => CSSColor::RGBA(rgba),
specified::Color::CurrentColor => CSSColor::CurrentColor,
// Resolve non-standard -moz keywords to RGBA:
non_standard => CSSColor::RGBA(non_standard.to_computed_value(context)),
}
}
#[inline]
fn from_computed_value(computed: &CSSColor) -> Self {
specified::CSSColor {
parsed: match *computed {
CSSColor::RGBA(rgba) => specified::Color::RGBA(rgba),
CSSColor::CurrentColor => specified::Color::CurrentColor,
},
authored: None,
}
}
}
#[cfg(feature = "gecko")]
impl ToComputedValue for specified::JustifyItems {
type ComputedValue = JustifyItems;
// https://drafts.csswg.org/css-align/#valdef-justify-items-auto
fn to_computed_value(&self, context: &Context) -> JustifyItems {
use values::specified::align;
// If the inherited value of `justify-items` includes the `legacy` keyword, `auto` computes
// to the inherited value.
if self.0 == align::ALIGN_AUTO {
let inherited = context.inherited_style.get_position().clone_justify_items();
if inherited.0.contains(align::ALIGN_LEGACY) {
return inherited
}
}
return *self
}
#[inline]
fn from_computed_value(computed: &JustifyItems) -> Self {
*computed
}
}
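// Added clarification: per the rule above, if an ancestor declared something like
// `justify-items: legacy center`, a descendant with `justify-items: auto` computes
// to that inherited legacy value; without the `legacy` keyword in the inherited
// value, `auto` simply stays `auto` at computed-value time.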
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignItems {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifyContent {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifySelf {}
impl ComputedValueAsSpecified for specified::BorderStyle {}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct BorderRadiusSize(pub Size2D<LengthOrPercentage>);
impl BorderRadiusSize {
#[allow(missing_docs)]
pub fn zero() -> BorderRadiusSize {
BorderRadiusSize(Size2D::new(LengthOrPercentage::Length(Au(0)), LengthOrPercentage::Length(Au(0))))
}
}
impl ToComputedValue for specified::BorderRadiusSize {
type ComputedValue = BorderRadiusSize;
#[inline]
fn to_computed_value(&self, context: &Context) -> BorderRadiusSize {
let w = self.0.width.to_computed_value(context);
let h = self.0.height.to_computed_value(context);
BorderRadiusSize(Size2D::new(w, h))
}
#[inline]
fn from_computed_value(computed: &BorderRadiusSize) -> Self {
let w = ToComputedValue::from_computed_value(&computed.0.width);
let h = ToComputedValue::from_computed_value(&computed.0.height);
specified::BorderRadiusSize(Size2D::new(w, h))
}
}
impl ToCss for BorderRadiusSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.0.width.to_css(dest));
try!(dest.write_str("/"));
self.0.height.to_css(dest)
}
}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct Shadow {
pub offset_x: Au,
pub offset_y: Au,
pub blur_radius: Au,
pub spread_radius: Au,
pub color: CSSColor,
pub inset: bool,
}
/// A `<number>` value.
pub type Number = CSSFloat;
/// A type used for opacity.
pub type Opacity = CSSFloat;
/// An SVG paint value
///
/// https://www.w3.org/TR/SVG2/painting.html#SpecifyingPaint
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct SVGPaint {
/// The paint source
pub kind: SVGPaintKind,
/// The fallback color
pub fallback: Option<CSSColor>,
}
impl Default for SVGPaint {
fn default() -> Self {
SVGPaint {
kind: SVGPaintKind::None,
fallback: None,
}
}
}
impl SVGPaint {
/// Opaque black color
pub fn black() -> Self {
let rgba = RGBA::from_floats(0., 0., 0., 1.);
SVGPaint {
kind: SVGPaintKind::Color(CSSColor::RGBA(rgba)),
fallback: None,
}
}
}
/// An SVG paint value without the fallback
///
/// Whereas the spec only allows PaintServer
/// to have a fallback, Gecko lets the context
/// properties have a fallback as well.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum SVGPaintKind {
/// `none`
None,
/// `<color>`
Color(CSSColor),
/// `url(...)`
PaintServer(SpecifiedUrl),
/// `context-fill`
ContextFill,
/// `context-stroke`
ContextStroke,
}
impl ToCss for SVGPaintKind {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
SVGPaintKind::None => dest.write_str("none"),
SVGPaintKind::ContextStroke => dest.write_str("context-stroke"),
SVGPaintKind::ContextFill => dest.write_str("context-fill"),
SVGPaintKind::Color(ref color) => color.to_css(dest),
SVGPaintKind::PaintServer(ref server) => server.to_css(dest),
}
}
}
impl ToCss for SVGPaint {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.kind.to_css(dest)?;
if let Some(ref fallback) = self.fallback {
fallback.to_css(dest)?;
}
Ok(())
}
}
/// <length> | <percentage> | <number>
pub type LoPOrNumber = Either<LengthOrPercentage, Number>;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
/// A computed cliprect for clip and image-region
pub struct ClipRect {
pub top: Option<Au>,
pub right: Option<Au>,
pub bottom: Option<Au>,
pub left: Option<Au>,
}
impl ToCss for ClipRect {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(dest.write_str("rect("));
if let Some(top) = self.top {
try!(top.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(right) = self.right {
try!(right.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(bottom) = self.bottom {
try!(bottom.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(left) = self.left {
try!(left.to_css(dest));
} else {
try!(dest.write_str("auto"));
}
dest.write_str(")")
}
}
/// rect(...) | auto
pub type ClipRectOrAuto = Either<ClipRect, Auto>;
/// The computed value of a grid `<track-breadth>`
pub type TrackBreadth = GenericTrackBreadth<LengthOrPercentage>;
/// The computed value of a grid `<track-size>`
pub type TrackSize = GenericTrackSize<LengthOrPercentage>;
impl ClipRectOrAuto {
/// Return an auto (default for clip-rect and image-region) value
pub fn auto() -> Self {
Either::Second(Auto)
}
/// Check if it is auto
pub fn is_auto(&self) -> bool {
match *self {
Either::Second(_) => true,
_ => false
}
}
}
/// <color> | auto
pub type ColorOrAuto = Either<CSSColor, Auto>;
|
pub fn mutate_style(&mut self) -> &mut ComputedValues { &mut self.style }
}
|
random_line_split
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed values.
use app_units::Au;
use euclid::size::Size2D;
use font_metrics::FontMetricsProvider;
use media_queries::Device;
use properties::ComputedValues;
use std::fmt;
use style_traits::ToCss;
use super::{CSSFloat, RGBA, specified};
use super::specified::grid::{TrackBreadth as GenericTrackBreadth, TrackSize as GenericTrackSize};
pub use cssparser::Color as CSSColor;
pub use self::image::{AngleOrCorner, EndingShape as GradientShape, Gradient, GradientKind, Image};
pub use self::image::{LengthOrKeyword, LengthOrPercentageOrKeyword};
pub use super::{Auto, Either, None_};
#[cfg(feature = "gecko")]
pub use super::specified::{AlignItems, AlignJustifyContent, AlignJustifySelf, JustifyItems};
pub use super::specified::{Angle, BorderStyle, GridLine, Time, UrlOrNone};
pub use super::specified::url::{SpecifiedUrl, UrlExtraData};
pub use self::length::{CalcLengthOrPercentage, Length, LengthOrNumber, LengthOrPercentage, LengthOrPercentageOrAuto};
pub use self::length::{LengthOrPercentageOrAutoOrContent, LengthOrPercentageOrNone, LengthOrNone};
pub use self::length::{MaxLength, MinLength};
pub use self::position::Position;
pub mod basic_shape;
pub mod image;
pub mod length;
pub mod position;
/// A `Context` is all the data a specified value could ever need to compute
/// itself and be transformed to a computed value.
pub struct Context<'a> {
/// Whether the current element is the root element.
pub is_root_element: bool,
/// The Device holds the viewport and other external state.
pub device: &'a Device,
/// The style we're inheriting from.
pub inherited_style: &'a ComputedValues,
/// The style of the layout parent node. This will almost always be
/// `inherited_style`, except when `display: contents` is at play, in which
/// case it's the style of the last ancestor with a `display` value that
/// isn't `contents`.
pub layout_parent_style: &'a ComputedValues,
    /// Values accessed through this need to be in the properties "computed
/// early": color, text-decoration, font-size, display, position, float,
/// border-*-style, outline-style, font-family, writing-mode...
pub style: ComputedValues,
/// A font metrics provider, used to access font metrics to implement
/// font-relative units.
///
/// TODO(emilio): This should be required, see #14079.
pub font_metrics_provider: Option<&'a FontMetricsProvider>,
}
impl<'a> Context<'a> {
/// Whether the current element is the root element.
pub fn is_root_element(&self) -> bool { self.is_root_element }
/// The current viewport size.
pub fn viewport_size(&self) -> Size2D<Au> { self.device.au_viewport_size() }
/// The style we're inheriting from.
pub fn inherited_style(&self) -> &ComputedValues { &self.inherited_style }
/// The current style. Note that only "eager" properties should be accessed
/// from here, see the comment in the member.
pub fn style(&self) -> &ComputedValues { &self.style }
/// A mutable reference to the current style.
pub fn mutate_style(&mut self) -> &mut ComputedValues { &mut self.style }
}
/// A trait to represent the conversion between computed and specified values.
pub trait ToComputedValue {
/// The computed value type we're going to be converted to.
type ComputedValue;
/// Convert a specified value to a computed value, using itself and the data
/// inside the `Context`.
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue;
#[inline]
/// Convert a computed value to specified value form.
///
/// This will be used for recascading during animation.
    /// Values produced by `from_computed_value` should recompute to the same value.
fn from_computed_value(computed: &Self::ComputedValue) -> Self;
}
/// A marker trait to represent that the specified value is also the computed
/// value.
pub trait ComputedValueAsSpecified {}
impl<T> ToComputedValue for T
where T: ComputedValueAsSpecified + Clone,
{
type ComputedValue = T;
#[inline]
fn to_computed_value(&self, _context: &Context) -> T {
self.clone()
}
#[inline]
fn from_computed_value(computed: &T) -> Self {
computed.clone()
}
}
impl ToComputedValue for specified::Color {
type ComputedValue = RGBA;
#[cfg(not(feature = "gecko"))]
fn to_computed_value(&self, context: &Context) -> RGBA {
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
}
}
#[cfg(feature = "gecko")]
fn to_computed_value(&self, context: &Context) -> RGBA
|
fn from_computed_value(computed: &RGBA) -> Self {
specified::Color::RGBA(*computed)
}
}
impl ToComputedValue for specified::CSSColor {
type ComputedValue = CSSColor;
#[cfg(not(feature = "gecko"))]
#[inline]
fn to_computed_value(&self, _context: &Context) -> CSSColor {
self.parsed
}
#[cfg(feature = "gecko")]
#[inline]
fn to_computed_value(&self, context: &Context) -> CSSColor {
match self.parsed {
specified::Color::RGBA(rgba) => CSSColor::RGBA(rgba),
specified::Color::CurrentColor => CSSColor::CurrentColor,
// Resolve non-standard -moz keywords to RGBA:
non_standard => CSSColor::RGBA(non_standard.to_computed_value(context)),
}
}
#[inline]
fn from_computed_value(computed: &CSSColor) -> Self {
specified::CSSColor {
parsed: match *computed {
CSSColor::RGBA(rgba) => specified::Color::RGBA(rgba),
CSSColor::CurrentColor => specified::Color::CurrentColor,
},
authored: None,
}
}
}
#[cfg(feature = "gecko")]
impl ToComputedValue for specified::JustifyItems {
type ComputedValue = JustifyItems;
// https://drafts.csswg.org/css-align/#valdef-justify-items-auto
fn to_computed_value(&self, context: &Context) -> JustifyItems {
use values::specified::align;
// If the inherited value of `justify-items` includes the `legacy` keyword, `auto` computes
// to the inherited value.
if self.0 == align::ALIGN_AUTO {
let inherited = context.inherited_style.get_position().clone_justify_items();
if inherited.0.contains(align::ALIGN_LEGACY) {
return inherited
}
}
return *self
}
#[inline]
fn from_computed_value(computed: &JustifyItems) -> Self {
*computed
}
}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignItems {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifyContent {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifySelf {}
impl ComputedValueAsSpecified for specified::BorderStyle {}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct BorderRadiusSize(pub Size2D<LengthOrPercentage>);
impl BorderRadiusSize {
#[allow(missing_docs)]
pub fn zero() -> BorderRadiusSize {
BorderRadiusSize(Size2D::new(LengthOrPercentage::Length(Au(0)), LengthOrPercentage::Length(Au(0))))
}
}
impl ToComputedValue for specified::BorderRadiusSize {
type ComputedValue = BorderRadiusSize;
#[inline]
fn to_computed_value(&self, context: &Context) -> BorderRadiusSize {
let w = self.0.width.to_computed_value(context);
let h = self.0.height.to_computed_value(context);
BorderRadiusSize(Size2D::new(w, h))
}
#[inline]
fn from_computed_value(computed: &BorderRadiusSize) -> Self {
let w = ToComputedValue::from_computed_value(&computed.0.width);
let h = ToComputedValue::from_computed_value(&computed.0.height);
specified::BorderRadiusSize(Size2D::new(w, h))
}
}
impl ToCss for BorderRadiusSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.0.width.to_css(dest));
try!(dest.write_str("/"));
self.0.height.to_css(dest)
}
}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct Shadow {
pub offset_x: Au,
pub offset_y: Au,
pub blur_radius: Au,
pub spread_radius: Au,
pub color: CSSColor,
pub inset: bool,
}
/// A `<number>` value.
pub type Number = CSSFloat;
/// A type used for opacity.
pub type Opacity = CSSFloat;
/// An SVG paint value
///
/// https://www.w3.org/TR/SVG2/painting.html#SpecifyingPaint
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct SVGPaint {
/// The paint source
pub kind: SVGPaintKind,
/// The fallback color
pub fallback: Option<CSSColor>,
}
impl Default for SVGPaint {
fn default() -> Self {
SVGPaint {
kind: SVGPaintKind::None,
fallback: None,
}
}
}
impl SVGPaint {
/// Opaque black color
pub fn black() -> Self {
let rgba = RGBA::from_floats(0., 0., 0., 1.);
SVGPaint {
kind: SVGPaintKind::Color(CSSColor::RGBA(rgba)),
fallback: None,
}
}
}
/// An SVG paint value without the fallback
///
/// Whereas the spec only allows PaintServer
/// to have a fallback, Gecko lets the context
/// properties have a fallback as well.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum SVGPaintKind {
/// `none`
None,
/// `<color>`
Color(CSSColor),
/// `url(...)`
PaintServer(SpecifiedUrl),
/// `context-fill`
ContextFill,
/// `context-stroke`
ContextStroke,
}
impl ToCss for SVGPaintKind {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
SVGPaintKind::None => dest.write_str("none"),
SVGPaintKind::ContextStroke => dest.write_str("context-stroke"),
SVGPaintKind::ContextFill => dest.write_str("context-fill"),
SVGPaintKind::Color(ref color) => color.to_css(dest),
SVGPaintKind::PaintServer(ref server) => server.to_css(dest),
}
}
}
impl ToCss for SVGPaint {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.kind.to_css(dest)?;
if let Some(ref fallback) = self.fallback {
fallback.to_css(dest)?;
}
Ok(())
}
}
/// <length> | <percentage> | <number>
pub type LoPOrNumber = Either<LengthOrPercentage, Number>;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
/// A computed cliprect for clip and image-region
pub struct ClipRect {
pub top: Option<Au>,
pub right: Option<Au>,
pub bottom: Option<Au>,
pub left: Option<Au>,
}
impl ToCss for ClipRect {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(dest.write_str("rect("));
if let Some(top) = self.top {
try!(top.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(right) = self.right {
try!(right.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(bottom) = self.bottom {
try!(bottom.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(left) = self.left {
try!(left.to_css(dest));
} else {
try!(dest.write_str("auto"));
}
dest.write_str(")")
}
}
/// rect(...) | auto
pub type ClipRectOrAuto = Either<ClipRect, Auto>;
/// The computed value of a grid `<track-breadth>`
pub type TrackBreadth = GenericTrackBreadth<LengthOrPercentage>;
/// The computed value of a grid `<track-size>`
pub type TrackSize = GenericTrackSize<LengthOrPercentage>;
impl ClipRectOrAuto {
/// Return an auto (default for clip-rect and image-region) value
pub fn auto() -> Self {
Either::Second(Auto)
}
/// Check if it is auto
pub fn is_auto(&self) -> bool {
match *self {
Either::Second(_) => true,
_ => false
}
}
}
/// <color> | auto
pub type ColorOrAuto = Either<CSSColor, Auto>;
|
{
use gecko::values::convert_nscolor_to_rgba as to_rgba;
// It's safe to access the nsPresContext immutably during style computation.
let pres_context = unsafe { &*context.device.pres_context };
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::System(system) => to_rgba(system.to_computed_value(context)),
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
specified::Color::MozDefaultColor => to_rgba(pres_context.mDefaultColor),
specified::Color::MozDefaultBackgroundColor => to_rgba(pres_context.mBackgroundColor),
specified::Color::MozHyperlinktext => to_rgba(pres_context.mLinkColor),
specified::Color::MozActiveHyperlinktext => to_rgba(pres_context.mActiveLinkColor),
specified::Color::MozVisitedHyperlinktext => to_rgba(pres_context.mVisitedLinkColor),
}
}
|
identifier_body
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed values.
use app_units::Au;
use euclid::size::Size2D;
use font_metrics::FontMetricsProvider;
use media_queries::Device;
use properties::ComputedValues;
use std::fmt;
use style_traits::ToCss;
use super::{CSSFloat, RGBA, specified};
use super::specified::grid::{TrackBreadth as GenericTrackBreadth, TrackSize as GenericTrackSize};
pub use cssparser::Color as CSSColor;
pub use self::image::{AngleOrCorner, EndingShape as GradientShape, Gradient, GradientKind, Image};
pub use self::image::{LengthOrKeyword, LengthOrPercentageOrKeyword};
pub use super::{Auto, Either, None_};
#[cfg(feature = "gecko")]
pub use super::specified::{AlignItems, AlignJustifyContent, AlignJustifySelf, JustifyItems};
pub use super::specified::{Angle, BorderStyle, GridLine, Time, UrlOrNone};
pub use super::specified::url::{SpecifiedUrl, UrlExtraData};
pub use self::length::{CalcLengthOrPercentage, Length, LengthOrNumber, LengthOrPercentage, LengthOrPercentageOrAuto};
pub use self::length::{LengthOrPercentageOrAutoOrContent, LengthOrPercentageOrNone, LengthOrNone};
pub use self::length::{MaxLength, MinLength};
pub use self::position::Position;
pub mod basic_shape;
pub mod image;
pub mod length;
pub mod position;
/// A `Context` is all the data a specified value could ever need to compute
/// itself and be transformed to a computed value.
pub struct Context<'a> {
/// Whether the current element is the root element.
pub is_root_element: bool,
/// The Device holds the viewport and other external state.
pub device: &'a Device,
/// The style we're inheriting from.
pub inherited_style: &'a ComputedValues,
/// The style of the layout parent node. This will almost always be
/// `inherited_style`, except when `display: contents` is at play, in which
/// case it's the style of the last ancestor with a `display` value that
/// isn't `contents`.
pub layout_parent_style: &'a ComputedValues,
    /// Values accessed through this need to be in the properties "computed
/// early": color, text-decoration, font-size, display, position, float,
/// border-*-style, outline-style, font-family, writing-mode...
pub style: ComputedValues,
/// A font metrics provider, used to access font metrics to implement
/// font-relative units.
///
/// TODO(emilio): This should be required, see #14079.
pub font_metrics_provider: Option<&'a FontMetricsProvider>,
}
impl<'a> Context<'a> {
/// Whether the current element is the root element.
pub fn is_root_element(&self) -> bool { self.is_root_element }
/// The current viewport size.
pub fn viewport_size(&self) -> Size2D<Au> { self.device.au_viewport_size() }
/// The style we're inheriting from.
pub fn inherited_style(&self) -> &ComputedValues { &self.inherited_style }
/// The current style. Note that only "eager" properties should be accessed
/// from here, see the comment in the member.
pub fn style(&self) -> &ComputedValues { &self.style }
/// A mutable reference to the current style.
pub fn mutate_style(&mut self) -> &mut ComputedValues { &mut self.style }
}
/// A trait to represent the conversion between computed and specified values.
pub trait ToComputedValue {
/// The computed value type we're going to be converted to.
type ComputedValue;
/// Convert a specified value to a computed value, using itself and the data
/// inside the `Context`.
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue;
#[inline]
/// Convert a computed value to specified value form.
///
/// This will be used for recascading during animation.
    /// Values produced by `from_computed_value` should recompute to the same value.
fn from_computed_value(computed: &Self::ComputedValue) -> Self;
}
/// A marker trait to represent that the specified value is also the computed
/// value.
pub trait ComputedValueAsSpecified {}
impl<T> ToComputedValue for T
where T: ComputedValueAsSpecified + Clone,
{
type ComputedValue = T;
#[inline]
fn to_computed_value(&self, _context: &Context) -> T {
self.clone()
}
#[inline]
fn from_computed_value(computed: &T) -> Self {
computed.clone()
}
}
impl ToComputedValue for specified::Color {
type ComputedValue = RGBA;
#[cfg(not(feature = "gecko"))]
fn to_computed_value(&self, context: &Context) -> RGBA {
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
}
}
#[cfg(feature = "gecko")]
fn to_computed_value(&self, context: &Context) -> RGBA {
use gecko::values::convert_nscolor_to_rgba as to_rgba;
// It's safe to access the nsPresContext immutably during style computation.
let pres_context = unsafe { &*context.device.pres_context };
match *self {
specified::Color::RGBA(rgba) => rgba,
specified::Color::System(system) => to_rgba(system.to_computed_value(context)),
specified::Color::CurrentColor => context.inherited_style.get_color().clone_color(),
specified::Color::MozDefaultColor => to_rgba(pres_context.mDefaultColor),
specified::Color::MozDefaultBackgroundColor => to_rgba(pres_context.mBackgroundColor),
specified::Color::MozHyperlinktext => to_rgba(pres_context.mLinkColor),
specified::Color::MozActiveHyperlinktext => to_rgba(pres_context.mActiveLinkColor),
specified::Color::MozVisitedHyperlinktext => to_rgba(pres_context.mVisitedLinkColor),
}
}
fn from_computed_value(computed: &RGBA) -> Self {
specified::Color::RGBA(*computed)
}
}
impl ToComputedValue for specified::CSSColor {
type ComputedValue = CSSColor;
#[cfg(not(feature = "gecko"))]
#[inline]
fn to_computed_value(&self, _context: &Context) -> CSSColor {
self.parsed
}
#[cfg(feature = "gecko")]
#[inline]
fn to_computed_value(&self, context: &Context) -> CSSColor {
match self.parsed {
specified::Color::RGBA(rgba) => CSSColor::RGBA(rgba),
specified::Color::CurrentColor => CSSColor::CurrentColor,
// Resolve non-standard -moz keywords to RGBA:
non_standard => CSSColor::RGBA(non_standard.to_computed_value(context)),
}
}
#[inline]
fn
|
(computed: &CSSColor) -> Self {
specified::CSSColor {
parsed: match *computed {
CSSColor::RGBA(rgba) => specified::Color::RGBA(rgba),
CSSColor::CurrentColor => specified::Color::CurrentColor,
},
authored: None,
}
}
}
#[cfg(feature = "gecko")]
impl ToComputedValue for specified::JustifyItems {
type ComputedValue = JustifyItems;
// https://drafts.csswg.org/css-align/#valdef-justify-items-auto
fn to_computed_value(&self, context: &Context) -> JustifyItems {
use values::specified::align;
// If the inherited value of `justify-items` includes the `legacy` keyword, `auto` computes
// to the inherited value.
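        // (editorial example, not in the original source) e.g. if the parent's
        // computed value were `legacy center`, a child's specified `auto` would
        // also compute to `legacy center` here.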
if self.0 == align::ALIGN_AUTO {
let inherited = context.inherited_style.get_position().clone_justify_items();
if inherited.0.contains(align::ALIGN_LEGACY) {
return inherited
}
}
return *self
}
#[inline]
fn from_computed_value(computed: &JustifyItems) -> Self {
*computed
}
}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignItems {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifyContent {}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifySelf {}
impl ComputedValueAsSpecified for specified::BorderStyle {}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct BorderRadiusSize(pub Size2D<LengthOrPercentage>);
impl BorderRadiusSize {
#[allow(missing_docs)]
pub fn zero() -> BorderRadiusSize {
BorderRadiusSize(Size2D::new(LengthOrPercentage::Length(Au(0)), LengthOrPercentage::Length(Au(0))))
}
}
impl ToComputedValue for specified::BorderRadiusSize {
type ComputedValue = BorderRadiusSize;
#[inline]
fn to_computed_value(&self, context: &Context) -> BorderRadiusSize {
let w = self.0.width.to_computed_value(context);
let h = self.0.height.to_computed_value(context);
BorderRadiusSize(Size2D::new(w, h))
}
#[inline]
fn from_computed_value(computed: &BorderRadiusSize) -> Self {
let w = ToComputedValue::from_computed_value(&computed.0.width);
let h = ToComputedValue::from_computed_value(&computed.0.height);
specified::BorderRadiusSize(Size2D::new(w, h))
}
}
impl ToCss for BorderRadiusSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.0.width.to_css(dest));
try!(dest.write_str("/"));
self.0.height.to_css(dest)
}
}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct Shadow {
pub offset_x: Au,
pub offset_y: Au,
pub blur_radius: Au,
pub spread_radius: Au,
pub color: CSSColor,
pub inset: bool,
}
/// A `<number>` value.
pub type Number = CSSFloat;
/// A type used for opacity.
pub type Opacity = CSSFloat;
/// An SVG paint value
///
/// https://www.w3.org/TR/SVG2/painting.html#SpecifyingPaint
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct SVGPaint {
/// The paint source
pub kind: SVGPaintKind,
/// The fallback color
pub fallback: Option<CSSColor>,
}
impl Default for SVGPaint {
fn default() -> Self {
SVGPaint {
kind: SVGPaintKind::None,
fallback: None,
}
}
}
impl SVGPaint {
/// Opaque black color
pub fn black() -> Self {
let rgba = RGBA::from_floats(0., 0., 0., 1.);
SVGPaint {
kind: SVGPaintKind::Color(CSSColor::RGBA(rgba)),
fallback: None,
}
}
}
/// An SVG paint value without the fallback
///
/// Whereas the spec only allows PaintServer
/// to have a fallback, Gecko lets the context
/// properties have a fallback as well.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum SVGPaintKind {
/// `none`
None,
/// `<color>`
Color(CSSColor),
/// `url(...)`
PaintServer(SpecifiedUrl),
/// `context-fill`
ContextFill,
/// `context-stroke`
ContextStroke,
}
impl ToCss for SVGPaintKind {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
SVGPaintKind::None => dest.write_str("none"),
SVGPaintKind::ContextStroke => dest.write_str("context-stroke"),
SVGPaintKind::ContextFill => dest.write_str("context-fill"),
SVGPaintKind::Color(ref color) => color.to_css(dest),
SVGPaintKind::PaintServer(ref server) => server.to_css(dest),
}
}
}
impl ToCss for SVGPaint {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.kind.to_css(dest)?;
if let Some(ref fallback) = self.fallback {
fallback.to_css(dest)?;
}
Ok(())
}
}
/// <length> | <percentage> | <number>
pub type LoPOrNumber = Either<LengthOrPercentage, Number>;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
/// A computed cliprect for clip and image-region
pub struct ClipRect {
pub top: Option<Au>,
pub right: Option<Au>,
pub bottom: Option<Au>,
pub left: Option<Au>,
}
impl ToCss for ClipRect {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(dest.write_str("rect("));
if let Some(top) = self.top {
try!(top.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(right) = self.right {
try!(right.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(bottom) = self.bottom {
try!(bottom.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(left) = self.left {
try!(left.to_css(dest));
} else {
try!(dest.write_str("auto"));
}
dest.write_str(")")
}
}
/// rect(...) | auto
pub type ClipRectOrAuto = Either<ClipRect, Auto>;
/// The computed value of a grid `<track-breadth>`
pub type TrackBreadth = GenericTrackBreadth<LengthOrPercentage>;
/// The computed value of a grid `<track-size>`
pub type TrackSize = GenericTrackSize<LengthOrPercentage>;
impl ClipRectOrAuto {
/// Return an auto (default for clip-rect and image-region) value
pub fn auto() -> Self {
Either::Second(Auto)
}
/// Check if it is auto
pub fn is_auto(&self) -> bool {
match *self {
Either::Second(_) => true,
_ => false
}
}
}
/// <color> | auto
pub type ColorOrAuto = Either<CSSColor, Auto>;
|
from_computed_value
|
identifier_name
|
mod.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod card;
mod config;
mod deck;
mod deckconf;
mod graves;
mod note;
mod notetype;
mod revlog;
mod sqlite;
mod sync;
mod sync_check;
mod tag;
mod upgrades;
pub(crate) use sqlite::SqliteStorage;
pub(crate) use sync::open_and_check_sqlite_file;
use std::fmt::Write;
// Write a list of IDs as '(x,y,...)' into the provided string.
pub(crate) fn ids_to_string<T>(buf: &mut String, ids: &[T])
where
T: std::fmt::Display,
|
#[cfg(test)]
mod test {
use super::ids_to_string;
#[test]
fn ids_string() {
let mut s = String::new();
ids_to_string::<u8>(&mut s, &[]);
assert_eq!(s, "()");
s.clear();
ids_to_string(&mut s, &[7]);
assert_eq!(s, "(7)");
s.clear();
ids_to_string(&mut s, &[7, 6]);
assert_eq!(s, "(6,7)");
s.clear();
ids_to_string(&mut s, &[7, 6, 5]);
assert_eq!(s, "(6,5,7)");
s.clear();
}
}
|
{
buf.push('(');
if !ids.is_empty() {
for id in ids.iter().skip(1) {
write!(buf, "{},", id).unwrap();
}
write!(buf, "{}", ids[0]).unwrap();
}
buf.push(')');
}
|
identifier_body
|
mod.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod card;
mod config;
mod deck;
mod deckconf;
mod graves;
mod note;
mod notetype;
mod revlog;
mod sqlite;
mod sync;
mod sync_check;
mod tag;
mod upgrades;
pub(crate) use sqlite::SqliteStorage;
pub(crate) use sync::open_and_check_sqlite_file;
use std::fmt::Write;
// Write a list of IDs as '(x,y,...)' into the provided string.
pub(crate) fn
|
<T>(buf: &mut String, ids: &[T])
where
T: std::fmt::Display,
{
buf.push('(');
    if !ids.is_empty() {
for id in ids.iter().skip(1) {
write!(buf, "{},", id).unwrap();
}
write!(buf, "{}", ids[0]).unwrap();
}
buf.push(')');
}
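// Editorial note (not in the original source): ids after the first element are
// written out first, each followed by a comma, and ids[0] is written last, so
// `[7, 6, 5]` serializes as "(6,5,7)"; see the tests below.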
#[cfg(test)]
mod test {
use super::ids_to_string;
#[test]
fn ids_string() {
let mut s = String::new();
ids_to_string::<u8>(&mut s, &[]);
assert_eq!(s, "()");
s.clear();
ids_to_string(&mut s, &[7]);
assert_eq!(s, "(7)");
s.clear();
ids_to_string(&mut s, &[7, 6]);
assert_eq!(s, "(6,7)");
s.clear();
ids_to_string(&mut s, &[7, 6, 5]);
assert_eq!(s, "(6,5,7)");
s.clear();
}
}
|
ids_to_string
|
identifier_name
|
mod.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod card;
mod config;
mod deck;
mod deckconf;
mod graves;
mod note;
mod notetype;
mod revlog;
mod sqlite;
mod sync;
mod sync_check;
mod tag;
mod upgrades;
pub(crate) use sqlite::SqliteStorage;
pub(crate) use sync::open_and_check_sqlite_file;
use std::fmt::Write;
// Write a list of IDs as '(x,y,...)' into the provided string.
pub(crate) fn ids_to_string<T>(buf: &mut String, ids: &[T])
where
T: std::fmt::Display,
{
buf.push('(');
    if !ids.is_empty()
|
buf.push(')');
}
#[cfg(test)]
mod test {
use super::ids_to_string;
#[test]
fn ids_string() {
let mut s = String::new();
ids_to_string::<u8>(&mut s, &[]);
assert_eq!(s, "()");
s.clear();
ids_to_string(&mut s, &[7]);
assert_eq!(s, "(7)");
s.clear();
ids_to_string(&mut s, &[7, 6]);
assert_eq!(s, "(6,7)");
s.clear();
ids_to_string(&mut s, &[7, 6, 5]);
assert_eq!(s, "(6,5,7)");
s.clear();
}
}
|
{
for id in ids.iter().skip(1) {
write!(buf, "{},", id).unwrap();
}
write!(buf, "{}", ids[0]).unwrap();
}
|
conditional_block
|
mod.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
mod card;
mod config;
mod deck;
mod deckconf;
mod graves;
mod note;
mod notetype;
mod revlog;
mod sqlite;
mod sync;
mod sync_check;
mod tag;
mod upgrades;
pub(crate) use sqlite::SqliteStorage;
pub(crate) use sync::open_and_check_sqlite_file;
use std::fmt::Write;
// Write a list of IDs as '(x,y,...)' into the provided string.
pub(crate) fn ids_to_string<T>(buf: &mut String, ids: &[T])
where
T: std::fmt::Display,
{
buf.push('(');
    if !ids.is_empty() {
for id in ids.iter().skip(1) {
write!(buf, "{},", id).unwrap();
}
write!(buf, "{}", ids[0]).unwrap();
}
buf.push(')');
}
#[cfg(test)]
mod test {
use super::ids_to_string;
#[test]
fn ids_string() {
let mut s = String::new();
ids_to_string::<u8>(&mut s, &[]);
assert_eq!(s, "()");
s.clear();
ids_to_string(&mut s, &[7]);
assert_eq!(s, "(7)");
s.clear();
ids_to_string(&mut s, &[7, 6]);
assert_eq!(s, "(6,7)");
s.clear();
ids_to_string(&mut s, &[7, 6, 5]);
assert_eq!(s, "(6,5,7)");
s.clear();
}
}
|
random_line_split
|
|
generator.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::{U256, H2048, Bytes};
use header::BlockNumber;
use transaction::SignedTransaction;
use super::fork::Fork;
use super::bloom::Bloom;
use super::complete::{BlockFinalizer, CompleteBlock, Complete};
use super::block::Block;
use super::transaction::Transaction;
/// Chain iterator interface.
pub trait ChainIterator: Iterator + Sized {
/// Should be called to create a fork of current iterator.
/// Blocks generated by fork will have lower difficulty than current chain.
fn fork(&self, fork_number: usize) -> Fork<Self> where Self: Clone;
/// Should be called to make every consecutive block have given bloom.
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self>;
/// Should be called to make every consecutive block have given transaction.
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self>;
/// Should be called to complete block. Without complete, block may have incorrect hash.
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self>;
/// Completes and generates block.
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where Self::Item: CompleteBlock;
}
impl<I> ChainIterator for I where I: Iterator + Sized {
fn fork(&self, fork_number: usize) -> Fork<Self> where I: Clone {
Fork {
iter: self.clone(),
fork_number: fork_number
}
}
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self> {
Bloom {
iter: self,
bloom: bloom
}
}
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self> {
Transaction {
iter: self,
transaction: transaction,
}
}
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self> {
Complete {
iter: self,
finalizer: finalizer
}
}
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where <I as Iterator>::Item: CompleteBlock {
self.complete(finalizer).next()
}
}
/// Blockchain generator.
#[derive(Clone)]
pub struct ChainGenerator {
/// Next block number.
number: BlockNumber,
/// Next block difficulty.
difficulty: U256,
}
impl ChainGenerator {
fn prepare_block(&self) -> Block {
let mut block = Block::default();
block.header.set_number(self.number);
block.header.set_difficulty(self.difficulty);
block
}
}
impl Default for ChainGenerator {
fn default() -> Self {
ChainGenerator {
number: 0,
difficulty: 1000.into(),
}
}
}
impl Iterator for ChainGenerator {
type Item = Block;
fn next(&mut self) -> Option<Self::Item> {
let block = self.prepare_block();
self.number += 1;
Some(block)
}
}
mod tests {
use util::hash::{H256, H2048};
use util::sha3::Hashable;
use views::BlockView;
use blockchain::generator::{ChainIterator, ChainGenerator, BlockFinalizer};
#[test]
fn canon_chain_generator() {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let genesis_rlp = canon_chain.generate(&mut finalizer).unwrap();
let genesis = BlockView::new(&genesis_rlp);
assert_eq!(genesis.header_view().parent_hash(), H256::default());
assert_eq!(genesis.header_view().number(), 0);
let b1_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b1 = BlockView::new(&b1_rlp);
assert_eq!(b1.header_view().parent_hash(), genesis.header_view().sha3());
assert_eq!(b1.header_view().number(), 1);
let mut fork_chain = canon_chain.fork(1);
let b2_rlp_fork = fork_chain.generate(&mut finalizer.fork()).unwrap();
let b2_fork = BlockView::new(&b2_rlp_fork);
assert_eq!(b2_fork.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2_fork.header_view().number(), 2);
let b2_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b2 = BlockView::new(&b2_rlp);
assert_eq!(b2.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2.header_view().number(), 2);
assert!(b2.header_view().difficulty() > b2_fork.header_view().difficulty());
}
#[test]
fn
|
() {
let bloom = H2048([0x1; 256]);
let mut gen = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let block0_rlp = gen.with_bloom(bloom).generate(&mut finalizer).unwrap();
let block1_rlp = gen.generate(&mut finalizer).unwrap();
let block0 = BlockView::new(&block0_rlp);
let block1 = BlockView::new(&block1_rlp);
assert_eq!(block0.header_view().number(), 0);
assert_eq!(block0.header_view().parent_hash(), H256::default());
assert_eq!(block1.header_view().number(), 1);
assert_eq!(block1.header_view().parent_hash(), block0.header_view().sha3());
}
#[test]
fn generate_1000_blocks() {
let generator = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let blocks: Vec<_> = generator.take(1000).complete(&mut finalizer).collect();
assert_eq!(blocks.len(), 1000);
}
}
|
with_bloom_generator
|
identifier_name
|
generator.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::{U256, H2048, Bytes};
use header::BlockNumber;
use transaction::SignedTransaction;
use super::fork::Fork;
use super::bloom::Bloom;
use super::complete::{BlockFinalizer, CompleteBlock, Complete};
use super::block::Block;
use super::transaction::Transaction;
/// Chain iterator interface.
pub trait ChainIterator: Iterator + Sized {
/// Should be called to create a fork of current iterator.
/// Blocks generated by fork will have lower difficulty than current chain.
fn fork(&self, fork_number: usize) -> Fork<Self> where Self: Clone;
/// Should be called to make every consecutive block have given bloom.
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self>;
/// Should be called to make every consecutive block have given transaction.
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self>;
/// Should be called to complete block. Without complete, block may have incorrect hash.
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self>;
/// Completes and generates block.
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where Self::Item: CompleteBlock;
}
impl<I> ChainIterator for I where I: Iterator + Sized {
fn fork(&self, fork_number: usize) -> Fork<Self> where I: Clone {
Fork {
iter: self.clone(),
fork_number: fork_number
}
}
|
bloom: bloom
}
}
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self> {
Transaction {
iter: self,
transaction: transaction,
}
}
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self> {
Complete {
iter: self,
finalizer: finalizer
}
}
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where <I as Iterator>::Item: CompleteBlock {
self.complete(finalizer).next()
}
}
/// Blockchain generator.
#[derive(Clone)]
pub struct ChainGenerator {
/// Next block number.
number: BlockNumber,
/// Next block difficulty.
difficulty: U256,
}
impl ChainGenerator {
fn prepare_block(&self) -> Block {
let mut block = Block::default();
block.header.set_number(self.number);
block.header.set_difficulty(self.difficulty);
block
}
}
impl Default for ChainGenerator {
fn default() -> Self {
ChainGenerator {
number: 0,
difficulty: 1000.into(),
}
}
}
impl Iterator for ChainGenerator {
type Item = Block;
fn next(&mut self) -> Option<Self::Item> {
let block = self.prepare_block();
self.number += 1;
Some(block)
}
}
mod tests {
use util::hash::{H256, H2048};
use util::sha3::Hashable;
use views::BlockView;
use blockchain::generator::{ChainIterator, ChainGenerator, BlockFinalizer};
#[test]
fn canon_chain_generator() {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let genesis_rlp = canon_chain.generate(&mut finalizer).unwrap();
let genesis = BlockView::new(&genesis_rlp);
assert_eq!(genesis.header_view().parent_hash(), H256::default());
assert_eq!(genesis.header_view().number(), 0);
let b1_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b1 = BlockView::new(&b1_rlp);
assert_eq!(b1.header_view().parent_hash(), genesis.header_view().sha3());
assert_eq!(b1.header_view().number(), 1);
let mut fork_chain = canon_chain.fork(1);
let b2_rlp_fork = fork_chain.generate(&mut finalizer.fork()).unwrap();
let b2_fork = BlockView::new(&b2_rlp_fork);
assert_eq!(b2_fork.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2_fork.header_view().number(), 2);
let b2_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b2 = BlockView::new(&b2_rlp);
assert_eq!(b2.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2.header_view().number(), 2);
assert!(b2.header_view().difficulty() > b2_fork.header_view().difficulty());
}
#[test]
fn with_bloom_generator() {
let bloom = H2048([0x1; 256]);
let mut gen = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let block0_rlp = gen.with_bloom(bloom).generate(&mut finalizer).unwrap();
let block1_rlp = gen.generate(&mut finalizer).unwrap();
let block0 = BlockView::new(&block0_rlp);
let block1 = BlockView::new(&block1_rlp);
assert_eq!(block0.header_view().number(), 0);
assert_eq!(block0.header_view().parent_hash(), H256::default());
assert_eq!(block1.header_view().number(), 1);
assert_eq!(block1.header_view().parent_hash(), block0.header_view().sha3());
}
#[test]
fn generate_1000_blocks() {
let generator = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let blocks: Vec<_> = generator.take(1000).complete(&mut finalizer).collect();
assert_eq!(blocks.len(), 1000);
}
}
|
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self> {
Bloom {
iter: self,
|
random_line_split
|
generator.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::{U256, H2048, Bytes};
use header::BlockNumber;
use transaction::SignedTransaction;
use super::fork::Fork;
use super::bloom::Bloom;
use super::complete::{BlockFinalizer, CompleteBlock, Complete};
use super::block::Block;
use super::transaction::Transaction;
/// Chain iterator interface.
pub trait ChainIterator: Iterator + Sized {
/// Should be called to create a fork of current iterator.
/// Blocks generated by fork will have lower difficulty than current chain.
fn fork(&self, fork_number: usize) -> Fork<Self> where Self: Clone;
/// Should be called to make every consecutive block have given bloom.
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self>;
/// Should be called to make every consecutive block have given transaction.
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self>;
/// Should be called to complete block. Without complete, block may have incorrect hash.
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self>;
/// Completes and generates block.
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where Self::Item: CompleteBlock;
}
impl<I> ChainIterator for I where I: Iterator + Sized {
fn fork(&self, fork_number: usize) -> Fork<Self> where I: Clone {
Fork {
iter: self.clone(),
fork_number: fork_number
}
}
fn with_bloom(&mut self, bloom: H2048) -> Bloom<Self> {
Bloom {
iter: self,
bloom: bloom
}
}
fn with_transaction(&mut self, transaction: SignedTransaction) -> Transaction<Self>
|
fn complete<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Complete<'a, Self> {
Complete {
iter: self,
finalizer: finalizer
}
}
fn generate<'a>(&'a mut self, finalizer: &'a mut BlockFinalizer) -> Option<Bytes> where <I as Iterator>::Item: CompleteBlock {
self.complete(finalizer).next()
}
}
/// Blockchain generator.
#[derive(Clone)]
pub struct ChainGenerator {
/// Next block number.
number: BlockNumber,
/// Next block difficulty.
difficulty: U256,
}
impl ChainGenerator {
fn prepare_block(&self) -> Block {
let mut block = Block::default();
block.header.set_number(self.number);
block.header.set_difficulty(self.difficulty);
block
}
}
impl Default for ChainGenerator {
fn default() -> Self {
ChainGenerator {
number: 0,
difficulty: 1000.into(),
}
}
}
impl Iterator for ChainGenerator {
type Item = Block;
fn next(&mut self) -> Option<Self::Item> {
let block = self.prepare_block();
self.number += 1;
Some(block)
}
}
mod tests {
use util::hash::{H256, H2048};
use util::sha3::Hashable;
use views::BlockView;
use blockchain::generator::{ChainIterator, ChainGenerator, BlockFinalizer};
#[test]
fn canon_chain_generator() {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let genesis_rlp = canon_chain.generate(&mut finalizer).unwrap();
let genesis = BlockView::new(&genesis_rlp);
assert_eq!(genesis.header_view().parent_hash(), H256::default());
assert_eq!(genesis.header_view().number(), 0);
let b1_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b1 = BlockView::new(&b1_rlp);
assert_eq!(b1.header_view().parent_hash(), genesis.header_view().sha3());
assert_eq!(b1.header_view().number(), 1);
let mut fork_chain = canon_chain.fork(1);
let b2_rlp_fork = fork_chain.generate(&mut finalizer.fork()).unwrap();
let b2_fork = BlockView::new(&b2_rlp_fork);
assert_eq!(b2_fork.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2_fork.header_view().number(), 2);
let b2_rlp = canon_chain.generate(&mut finalizer).unwrap();
let b2 = BlockView::new(&b2_rlp);
assert_eq!(b2.header_view().parent_hash(), b1.header_view().sha3());
assert_eq!(b2.header_view().number(), 2);
assert!(b2.header_view().difficulty() > b2_fork.header_view().difficulty());
}
#[test]
fn with_bloom_generator() {
let bloom = H2048([0x1; 256]);
let mut gen = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let block0_rlp = gen.with_bloom(bloom).generate(&mut finalizer).unwrap();
let block1_rlp = gen.generate(&mut finalizer).unwrap();
let block0 = BlockView::new(&block0_rlp);
let block1 = BlockView::new(&block1_rlp);
assert_eq!(block0.header_view().number(), 0);
assert_eq!(block0.header_view().parent_hash(), H256::default());
assert_eq!(block1.header_view().number(), 1);
assert_eq!(block1.header_view().parent_hash(), block0.header_view().sha3());
}
#[test]
fn generate_1000_blocks() {
let generator = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let blocks: Vec<_> = generator.take(1000).complete(&mut finalizer).collect();
assert_eq!(blocks.len(), 1000);
}
}
|
{
Transaction {
iter: self,
transaction: transaction,
}
}
|
identifier_body
|
overlay.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use Bin;
use Container;
use Widget;
use ffi;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct Overlay(Object<ffi::GtkOverlay>): Bin, Container, Widget;
match fn {
get_type => || ffi::gtk_overlay_get_type(),
}
}
impl Overlay {
pub fn new() -> Overlay {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_overlay_new()).downcast_unchecked()
}
}
}
pub trait OverlayExt {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P);
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool;
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32);
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool);
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32);
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64;
}
impl<O: IsA<Overlay> + IsA<Container>> OverlayExt for O {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P) {
unsafe {
ffi::gtk_overlay_add_overlay(self.to_glib_none().0, widget.to_glib_none().0);
}
}
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool {
unsafe {
from_glib(ffi::gtk_overlay_get_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0))
}
}
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32) {
unsafe {
ffi::gtk_overlay_reorder_overlay(self.to_glib_none().0, child.to_glib_none().0, position);
}
}
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool) {
unsafe {
ffi::gtk_overlay_set_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0, pass_through.to_glib());
}
}
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32 {
let mut value = Value::from(&0);
unsafe {
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, value.to_glib_none_mut().0);
}
value.get().unwrap()
}
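    // Editorial note (not in the original source): get_child_index and the setter
    // below wrap gtk_container_child_{get,set}_property for the container child
    // property named "index" on the overlay's children.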
fn
|
<T: IsA<Widget>>(&self, item: &T, index: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, Value::from(&index).to_glib_none().0);
}
}
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64 {
// Out allocation: Gdk.Rectangle
//}
}
|
set_child_index
|
identifier_name
|
overlay.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use Bin;
use Container;
use Widget;
use ffi;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct Overlay(Object<ffi::GtkOverlay>): Bin, Container, Widget;
match fn {
get_type => || ffi::gtk_overlay_get_type(),
}
}
impl Overlay {
pub fn new() -> Overlay {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_overlay_new()).downcast_unchecked()
}
}
}
pub trait OverlayExt {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P);
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool;
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32);
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool);
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32);
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64;
}
|
ffi::gtk_overlay_add_overlay(self.to_glib_none().0, widget.to_glib_none().0);
}
}
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool {
unsafe {
from_glib(ffi::gtk_overlay_get_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0))
}
}
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32) {
unsafe {
ffi::gtk_overlay_reorder_overlay(self.to_glib_none().0, child.to_glib_none().0, position);
}
}
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool) {
unsafe {
ffi::gtk_overlay_set_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0, pass_through.to_glib());
}
}
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32 {
let mut value = Value::from(&0);
unsafe {
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, value.to_glib_none_mut().0);
}
value.get().unwrap()
}
fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, Value::from(&index).to_glib_none().0);
}
}
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64 {
// Out allocation: Gdk.Rectangle
//}
}
|
impl<O: IsA<Overlay> + IsA<Container>> OverlayExt for O {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P) {
unsafe {
|
random_line_split
|
overlay.rs
|
// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use Bin;
use Container;
use Widget;
use ffi;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct Overlay(Object<ffi::GtkOverlay>): Bin, Container, Widget;
match fn {
get_type => || ffi::gtk_overlay_get_type(),
}
}
impl Overlay {
pub fn new() -> Overlay {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_overlay_new()).downcast_unchecked()
}
}
}
pub trait OverlayExt {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P);
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool;
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32);
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool);
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32;
fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32);
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64;
}
impl<O: IsA<Overlay> + IsA<Container>> OverlayExt for O {
fn add_overlay<P: IsA<Widget>>(&self, widget: &P) {
unsafe {
ffi::gtk_overlay_add_overlay(self.to_glib_none().0, widget.to_glib_none().0);
}
}
#[cfg(feature = "v3_18")]
fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool {
unsafe {
from_glib(ffi::gtk_overlay_get_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0))
}
}
#[cfg(feature = "v3_18")]
fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32) {
unsafe {
ffi::gtk_overlay_reorder_overlay(self.to_glib_none().0, child.to_glib_none().0, position);
}
}
#[cfg(feature = "v3_18")]
fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool) {
unsafe {
ffi::gtk_overlay_set_overlay_pass_through(self.to_glib_none().0, widget.to_glib_none().0, pass_through.to_glib());
}
}
fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32
|
fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32) {
unsafe {
ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, Value::from(&index).to_glib_none().0);
}
}
//fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> u64 {
// Out allocation: Gdk.Rectangle
//}
}
|
{
let mut value = Value::from(&0);
unsafe {
ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "index".to_glib_none().0, value.to_glib_none_mut().0);
}
value.get().unwrap()
}
|
identifier_body
|
kill.rs
|
#![crate_name = "kill"]
#![feature(collections, core, old_io, rustc_private, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Maciej Dziardziel <[email protected]>
*
* For the full copyright and license information, please view the LICENSE file
* that was distributed with this source code.
*/
extern crate getopts;
extern crate libc;
extern crate collections;
extern crate serialize;
#[macro_use] extern crate log;
use std::process::Child;
use getopts::{
getopts,
optopt,
optflag,
optflagopt,
usage,
};
use signals::ALL_SIGNALS;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
#[path = "signals.rs"]
mod signals;
static NAME: &'static str = "kill";
static VERSION: &'static str = "0.0.1";
static EXIT_OK: i32 = 0;
static EXIT_ERR: i32 = 1;
#[derive(Clone)]
pub enum Mode {
Kill,
Table,
List,
Help,
Version,
}
impl Copy for Mode {}
pub fn main(args: Vec<String>) -> i32 {
let opts = [
optflag("h", "help", "display this help and exit"),
optflag("V", "version", "output version information and exit"),
optopt("s", "signal", "specify the <signal> to be sent", "SIGNAL"),
optflagopt("l", "list", "list all signal names, or convert one to a name", "LIST"),
optflag("L", "table", "list all signal names in a nice table"),
];
let usage = usage("[options] <pid> [...]", &opts);
let (args, obs_signal) = handle_obsolete(args);
let matches = match getopts(args.tail(), &opts) {
Ok(m) => m,
Err(e) => {
show_error!("{}\n{}", e, get_help_text(NAME, usage.as_slice()));
return EXIT_ERR;
},
};
let mode = if matches.opt_present("version") {
Mode::Version
} else if matches.opt_present("help") {
Mode::Help
} else if matches.opt_present("table") {
Mode::Table
} else if matches.opt_present("list") {
Mode::List
} else {
Mode::Kill
};
match mode {
Mode::Kill => return kill(matches.opt_str("signal").unwrap_or(obs_signal.unwrap_or("9".to_string())).as_slice(), matches.free),
Mode::Table => table(),
Mode::List => list(matches.opt_str("list")),
Mode::Help => help(NAME, usage.as_slice()),
Mode::Version => version(),
}
0
}
fn version() {
println!("{} {}", NAME, VERSION);
}
fn handle_obsolete(mut args: Vec<String>) -> (Vec<String>, Option<String>) {
let mut i = 0;
while i < args.len() {
// this is safe because slice is valid when it is referenced
let slice: &str = unsafe { std::mem::transmute(args[i].as_slice()) };
if slice.char_at(0) == '-' && slice.len() > 1 && slice.char_at(1).is_digit(10) {
let val = &slice[1..];
match val.parse() {
Ok(num) => {
if signals::is_signal(num) {
args.remove(i);
return (args, Some(val.to_string()));
}
}
                Err(_) => break /* getopts will error out for us */
}
}
i += 1;
}
(args, None)
}
fn table() {
let mut name_width = 0;
/* Compute the maximum width of a signal name. */
for s in ALL_SIGNALS.iter() {
if s.name.len() > name_width {
name_width = s.name.len()
}
}
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
print!("{0: >#2} {1: <#8}", idx+1, signal.name);
//TODO: obtain max signal width here
if (idx+1) % 7 == 0 {
println!("");
}
}
}
fn
|
(signal_name_or_value: &str) {
for signal in ALL_SIGNALS.iter() {
if signal.name == signal_name_or_value || (format!("SIG{}", signal.name).as_slice()) == signal_name_or_value {
println!("{}", signal.value);
exit!(EXIT_OK as i32)
} else if signal_name_or_value == signal.value.as_slice() {
println!("{}", signal.name);
exit!(EXIT_OK as i32)
}
}
crash!(EXIT_ERR, "unknown signal name {}", signal_name_or_value)
}
fn print_signals() {
let mut pos = 0;
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
pos += signal.name.len();
print!("{}", signal.name);
if idx > 0 && pos > 73 {
println!("");
pos = 0;
} else {
pos += 1;
print!(" ");
}
}
}
fn list(arg: Option<String>) {
match arg {
Some(x) => print_signal(x.as_slice()),
None => print_signals(),
};
}
fn get_help_text(progname: &str, usage: &str) -> String {
format!("Usage: \n {0} {1}", progname, usage)
}
fn help(progname: &str, usage: &str) {
println!("{}", get_help_text(progname, usage));
}
fn kill(signalname: &str, pids: std::vec::Vec<String>) -> i32 {
let mut status = 0;
let optional_signal_value = signals::signal_by_name_or_value(signalname);
let signal_value = match optional_signal_value {
Some(x) => x,
None => crash!(EXIT_ERR, "unknown signal name {}", signalname)
};
for pid in pids.iter() {
match pid.as_slice().parse() {
Ok(x) => {
let result = Child::kill(x, signal_value as isize);
match result {
Ok(_) => (),
Err(f) => {
show_error!("{}", f);
status = 1;
}
};
},
Err(e) => crash!(EXIT_ERR, "failed to parse argument {}: {}", pid, e)
};
}
status
}
|
print_signal
|
identifier_name
|
kill.rs
|
#![crate_name = "kill"]
#![feature(collections, core, old_io, rustc_private, unicode)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Maciej Dziardziel <[email protected]>
*
* For the full copyright and license information, please view the LICENSE file
* that was distributed with this source code.
*/
extern crate getopts;
extern crate libc;
extern crate collections;
extern crate serialize;
#[macro_use] extern crate log;
use std::process::Child;
use getopts::{
getopts,
optopt,
optflag,
optflagopt,
usage,
};
use signals::ALL_SIGNALS;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
|
static EXIT_OK: i32 = 0;
static EXIT_ERR: i32 = 1;
#[derive(Clone)]
pub enum Mode {
Kill,
Table,
List,
Help,
Version,
}
impl Copy for Mode {}
pub fn main(args: Vec<String>) -> i32 {
let opts = [
optflag("h", "help", "display this help and exit"),
optflag("V", "version", "output version information and exit"),
optopt("s", "signal", "specify the <signal> to be sent", "SIGNAL"),
optflagopt("l", "list", "list all signal names, or convert one to a name", "LIST"),
optflag("L", "table", "list all signal names in a nice table"),
];
let usage = usage("[options] <pid> [...]", &opts);
let (args, obs_signal) = handle_obsolete(args);
let matches = match getopts(args.tail(), &opts) {
Ok(m) => m,
Err(e) => {
show_error!("{}\n{}", e, get_help_text(NAME, usage.as_slice()));
return EXIT_ERR;
},
};
let mode = if matches.opt_present("version") {
Mode::Version
} else if matches.opt_present("help") {
Mode::Help
} else if matches.opt_present("table") {
Mode::Table
} else if matches.opt_present("list") {
Mode::List
} else {
Mode::Kill
};
match mode {
Mode::Kill => return kill(matches.opt_str("signal").unwrap_or(obs_signal.unwrap_or("9".to_string())).as_slice(), matches.free),
Mode::Table => table(),
Mode::List => list(matches.opt_str("list")),
Mode::Help => help(NAME, usage.as_slice()),
Mode::Version => version(),
}
0
}
fn version() {
println!("{} {}", NAME, VERSION);
}
fn handle_obsolete(mut args: Vec<String>) -> (Vec<String>, Option<String>) {
let mut i = 0;
while i < args.len() {
// this is safe because slice is valid when it is referenced
let slice: &str = unsafe { std::mem::transmute(args[i].as_slice()) };
if slice.char_at(0) == '-' && slice.len() > 1 && slice.char_at(1).is_digit(10) {
let val = &slice[1..];
match val.parse() {
Ok(num) => {
if signals::is_signal(num) {
args.remove(i);
return (args, Some(val.to_string()));
}
}
                Err(_) => break /* getopts will error out for us */
}
}
i += 1;
}
(args, None)
}
fn table() {
let mut name_width = 0;
/* Compute the maximum width of a signal name. */
for s in ALL_SIGNALS.iter() {
if s.name.len() > name_width {
name_width = s.name.len()
}
}
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
print!("{0: >#2} {1: <#8}", idx+1, signal.name);
//TODO: obtain max signal width here
if (idx+1) % 7 == 0 {
println!("");
}
}
}
fn print_signal(signal_name_or_value: &str) {
for signal in ALL_SIGNALS.iter() {
if signal.name == signal_name_or_value || (format!("SIG{}", signal.name).as_slice()) == signal_name_or_value {
println!("{}", signal.value);
exit!(EXIT_OK as i32)
} else if signal_name_or_value == signal.value.as_slice() {
println!("{}", signal.name);
exit!(EXIT_OK as i32)
}
}
crash!(EXIT_ERR, "unknown signal name {}", signal_name_or_value)
}
fn print_signals() {
let mut pos = 0;
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
pos += signal.name.len();
print!("{}", signal.name);
if idx > 0 && pos > 73 {
println!("");
pos = 0;
} else {
pos += 1;
print!(" ");
}
}
}
fn list(arg: Option<String>) {
match arg {
Some(x) => print_signal(x.as_slice()),
None => print_signals(),
};
}
fn get_help_text(progname: &str, usage: &str) -> String {
format!("Usage: \n {0} {1}", progname, usage)
}
fn help(progname: &str, usage: &str) {
println!("{}", get_help_text(progname, usage));
}
fn kill(signalname: &str, pids: std::vec::Vec<String>) -> i32 {
let mut status = 0;
let optional_signal_value = signals::signal_by_name_or_value(signalname);
let signal_value = match optional_signal_value {
Some(x) => x,
None => crash!(EXIT_ERR, "unknown signal name {}", signalname)
};
for pid in pids.iter() {
match pid.as_slice().parse() {
Ok(x) => {
let result = Child::kill(x, signal_value as isize);
match result {
Ok(_) => (),
Err(f) => {
show_error!("{}", f);
status = 1;
}
};
},
Err(e) => crash!(EXIT_ERR, "failed to parse argument {}: {}", pid, e)
};
}
status
}
|
#[path = "signals.rs"]
mod signals;
static NAME: &'static str = "kill";
static VERSION: &'static str = "0.0.1";
|
random_line_split
|
shootout-k-nucleotide.rs
|
// xfail-test
extern mod extra;
use std::cast::transmute;
use std::i32::range;
use std::libc::{STDIN_FILENO, c_int, fdopen, fgets, fileno, fopen, fstat};
use std::libc::{stat, strlen};
use std::ptr::null;
use std::unstable::intrinsics::init;
use std::vec::{reverse};
use extra::sort::quick_sort3;
static LINE_LEN: uint = 80;
static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ];
static TABLE_SIZE: uint = 2 << 16;
static OCCURRENCES: [&'static str, ..5] = [
"GGT",
"GGTA",
"GGTATT",
"GGTATTTTAATT",
"GGTATTTTAATTTATAGT",
];
// Code implementation
#[deriving(Eq, Ord)]
struct Code(u64);
impl Code {
fn hash(&self) -> u64 {
**self
}
#[inline(always)]
fn push_char(&self, c: u8) -> Code {
Code((**self << 2) + (pack_symbol(c) as u64))
}
fn rotate(&self, c: u8, frame: i32) -> Code {
Code(*self.push_char(c) & ((1u64 << (2 * (frame as u64))) - 1))
}
fn
|
(string: &str) -> Code {
let mut code = Code(0u64);
for uint::range(0, string.len()) |i| {
code = code.push_char(string[i]);
}
code
}
// XXX: Inefficient.
fn unpack(&self, frame: i32) -> ~str {
let mut key = **self;
let mut result = ~[];
for (frame as uint).times {
result.push(unpack_symbol((key as u8) & 3));
key >>= 2;
}
reverse(result);
str::from_bytes(result)
}
}
// Hash table implementation
trait TableCallback {
fn f(&self, entry: &mut Entry);
}
struct BumpCallback;
impl TableCallback for BumpCallback {
fn f(&self, entry: &mut Entry) {
entry.count += 1;
}
}
struct PrintCallback(&'static str);
impl TableCallback for PrintCallback {
fn f(&self, entry: &mut Entry) {
println(fmt!("%d\t%s", entry.count as int, **self));
}
}
struct Entry {
code: Code,
count: i32,
next: Option<~Entry>,
}
struct Table {
count: i32,
    items: [Option<~Entry>, ..TABLE_SIZE]
}
impl Table {
fn new() -> Table {
Table {
count: 0,
            items: [ None, ..TABLE_SIZE ],
}
}
fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) {
match item.next {
None => {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
item.next = Some(entry);
}
Some(ref mut entry) => {
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
}
fn lookup<C:TableCallback>(&mut self, key: Code, c: C) {
let index = *key % (TABLE_SIZE as u64);
{
if self.items[index].is_none() {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
self.items[index] = Some(entry);
return;
}
}
{
let mut entry = &mut *self.items[index].get_mut_ref();
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
fn each(&self, f: &fn(entry: &Entry) -> bool) {
for self.items.each |item| {
match *item {
None => {}
Some(ref item) => {
let mut item: &Entry = *item;
loop {
                    if !f(item) {
return;
}
match item.next {
None => break,
Some(ref next_item) => item = &**next_item,
}
}
}
};
}
}
}
// Main program
fn pack_symbol(c: u8) -> u8 {
match c {
'a' as u8 | 'A' as u8 => 0,
'c' as u8 | 'C' as u8 => 1,
'g' as u8 | 'G' as u8 => 2,
't' as u8 | 'T' as u8 => 3,
_ => fail!(c.to_str())
}
}
fn unpack_symbol(c: u8) -> u8 {
TABLE[c]
}
fn next_char<'a>(mut buf: &'a [u8]) -> &'a [u8] {
loop {
buf = buf.slice(1, buf.len());
if buf.len() == 0 {
break;
}
        if buf[0] != (' ' as u8) && buf[0] != ('\t' as u8) &&
                buf[0] != ('\n' as u8) && buf[0] != 0 {
break;
}
}
buf
}
#[inline(never)]
fn read_stdin() -> ~[u8] {
unsafe {
let mode = "r";
//let stdin = fdopen(STDIN_FILENO as c_int, transmute(&mode[0]));
let path = "knucleotide-input.txt";
let stdin = fopen(transmute(&path[0]), transmute(&mode[0]));
let mut st: stat = init();
fstat(fileno(stdin), &mut st);
let mut buf = vec::from_elem(st.st_size as uint, 0);
let header = ">THREE".as_bytes();
{
let mut window: &mut [u8] = buf;
loop {
fgets(transmute(&mut window[0]), LINE_LEN as c_int, stdin);
{
if window.slice(0, 6) == header {
break;
}
}
}
while fgets(transmute(&mut window[0]),
LINE_LEN as c_int,
                    stdin) != null() {
window = window.mut_slice(strlen(transmute(&window[0])) as uint, window.len());
}
}
buf
}
}
#[inline(never)]
#[fixed_stack_segment]
fn generate_frequencies(frequencies: &mut Table,
mut input: &[u8],
frame: i32) {
let mut code = Code(0);
// Pull first frame.
for (frame as uint).times {
code = code.push_char(input[0]);
input = next_char(input);
}
frequencies.lookup(code, BumpCallback);
    while input.len() != 0 && input[0] != ('>' as u8) {
code = code.rotate(input[0], frame);
frequencies.lookup(code, BumpCallback);
input = next_char(input);
}
}
#[inline(never)]
#[fixed_stack_segment]
fn print_frequencies(frequencies: &Table, frame: i32) {
let mut vector = ~[];
for frequencies.each |entry| {
vector.push((entry.code, entry.count));
}
quick_sort3(vector);
let mut total_count = 0;
for vector.each |&(_, count)| {
total_count += count;
}
for vector.each |&(key, count)| {
println(fmt!("%s %.3f",
key.unpack(frame),
(count as float * 100.0) / (total_count as float)));
}
}
fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {
frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))
}
#[fixed_stack_segment]
fn main() {
let input = read_stdin();
let mut frequencies = ~Table::new();
generate_frequencies(frequencies, input, 1);
print_frequencies(frequencies, 1);
*frequencies = Table::new();
generate_frequencies(frequencies, input, 2);
print_frequencies(frequencies, 2);
for range(0, 5) |i| {
let occurrence = OCCURRENCES[i];
*frequencies = Table::new();
generate_frequencies(frequencies,
input,
occurrence.len() as i32);
print_occurrences(frequencies, occurrence);
}
}
|
pack
|
identifier_name
|
shootout-k-nucleotide.rs
|
// xfail-test
extern mod extra;
use std::cast::transmute;
use std::i32::range;
use std::libc::{STDIN_FILENO, c_int, fdopen, fgets, fileno, fopen, fstat};
use std::libc::{stat, strlen};
use std::ptr::null;
use std::unstable::intrinsics::init;
use std::vec::{reverse};
use extra::sort::quick_sort3;
static LINE_LEN: uint = 80;
static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ];
static TABLE_SIZE: uint = 2 << 16;
static OCCURRENCES: [&'static str, ..5] = [
"GGT",
"GGTA",
"GGTATT",
"GGTATTTTAATT",
"GGTATTTTAATTTATAGT",
];
// Code implementation
#[deriving(Eq, Ord)]
struct Code(u64);
impl Code {
fn hash(&self) -> u64 {
**self
}
#[inline(always)]
fn push_char(&self, c: u8) -> Code {
Code((**self << 2) + (pack_symbol(c) as u64))
}
fn rotate(&self, c: u8, frame: i32) -> Code {
Code(*self.push_char(c) & ((1u64 << (2 * (frame as u64))) - 1))
}
fn pack(string: &str) -> Code {
let mut code = Code(0u64);
for uint::range(0, string.len()) |i| {
code = code.push_char(string[i]);
}
code
}
// XXX: Inefficient.
fn unpack(&self, frame: i32) -> ~str {
let mut key = **self;
let mut result = ~[];
for (frame as uint).times {
result.push(unpack_symbol((key as u8) & 3));
key >>= 2;
}
reverse(result);
str::from_bytes(result)
}
}
// Hash table implementation
trait TableCallback {
fn f(&self, entry: &mut Entry);
}
struct BumpCallback;
impl TableCallback for BumpCallback {
fn f(&self, entry: &mut Entry) {
entry.count += 1;
}
}
struct PrintCallback(&'static str);
impl TableCallback for PrintCallback {
fn f(&self, entry: &mut Entry) {
println(fmt!("%d\t%s", entry.count as int, **self));
}
}
struct Entry {
code: Code,
count: i32,
next: Option<~Entry>,
}
struct Table {
count: i32,
    items: [Option<~Entry>, ..TABLE_SIZE]
}
impl Table {
fn new() -> Table {
Table {
count: 0,
            items: [ None, ..TABLE_SIZE ],
}
}
fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) {
match item.next {
None => {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
item.next = Some(entry);
}
Some(ref mut entry) => {
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
}
fn lookup<C:TableCallback>(&mut self, key: Code, c: C) {
let index = *key % (TABLE_SIZE as u64);
{
if self.items[index].is_none()
|
}
{
let mut entry = &mut *self.items[index].get_mut_ref();
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
fn each(&self, f: &fn(entry: &Entry) -> bool) {
for self.items.each |item| {
match *item {
None => {}
Some(ref item) => {
let mut item: &Entry = *item;
loop {
                    if !f(item) {
return;
}
match item.next {
None => break,
Some(ref next_item) => item = &**next_item,
}
}
}
};
}
}
}
// Main program
fn pack_symbol(c: u8) -> u8 {
match c {
'a' as u8 | 'A' as u8 => 0,
'c' as u8 | 'C' as u8 => 1,
'g' as u8 | 'G' as u8 => 2,
't' as u8 | 'T' as u8 => 3,
_ => fail!(c.to_str())
}
}
fn unpack_symbol(c: u8) -> u8 {
TABLE[c]
}
fn next_char<'a>(mut buf: &'a [u8]) -> &'a [u8] {
loop {
buf = buf.slice(1, buf.len());
if buf.len() == 0 {
break;
}
        if buf[0] != (' ' as u8) && buf[0] != ('\t' as u8) &&
                buf[0] != ('\n' as u8) && buf[0] != 0 {
break;
}
}
buf
}
#[inline(never)]
fn read_stdin() -> ~[u8] {
unsafe {
let mode = "r";
//let stdin = fdopen(STDIN_FILENO as c_int, transmute(&mode[0]));
let path = "knucleotide-input.txt";
let stdin = fopen(transmute(&path[0]), transmute(&mode[0]));
let mut st: stat = init();
fstat(fileno(stdin), &mut st);
let mut buf = vec::from_elem(st.st_size as uint, 0);
let header = ">THREE".as_bytes();
{
let mut window: &mut [u8] = buf;
loop {
fgets(transmute(&mut window[0]), LINE_LEN as c_int, stdin);
{
if window.slice(0, 6) == header {
break;
}
}
}
while fgets(transmute(&mut window[0]),
LINE_LEN as c_int,
                    stdin) != null() {
window = window.mut_slice(strlen(transmute(&window[0])) as uint, window.len());
}
}
buf
}
}
#[inline(never)]
#[fixed_stack_segment]
fn generate_frequencies(frequencies: &mut Table,
mut input: &[u8],
frame: i32) {
let mut code = Code(0);
// Pull first frame.
for (frame as uint).times {
code = code.push_char(input[0]);
input = next_char(input);
}
frequencies.lookup(code, BumpCallback);
    while input.len() != 0 && input[0] != ('>' as u8) {
code = code.rotate(input[0], frame);
frequencies.lookup(code, BumpCallback);
input = next_char(input);
}
}
#[inline(never)]
#[fixed_stack_segment]
fn print_frequencies(frequencies: &Table, frame: i32) {
let mut vector = ~[];
for frequencies.each |entry| {
vector.push((entry.code, entry.count));
}
quick_sort3(vector);
let mut total_count = 0;
for vector.each |&(_, count)| {
total_count += count;
}
for vector.each |&(key, count)| {
println(fmt!("%s %.3f",
key.unpack(frame),
(count as float * 100.0) / (total_count as float)));
}
}
fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {
frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))
}
#[fixed_stack_segment]
fn main() {
let input = read_stdin();
let mut frequencies = ~Table::new();
generate_frequencies(frequencies, input, 1);
print_frequencies(frequencies, 1);
*frequencies = Table::new();
generate_frequencies(frequencies, input, 2);
print_frequencies(frequencies, 2);
for range(0, 5) |i| {
let occurrence = OCCURRENCES[i];
*frequencies = Table::new();
generate_frequencies(frequencies,
input,
occurrence.len() as i32);
print_occurrences(frequencies, occurrence);
}
}
|
{
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
self.items[index] = Some(entry);
return;
}
|
conditional_block
|
shootout-k-nucleotide.rs
|
// xfail-test
extern mod extra;
use std::cast::transmute;
use std::i32::range;
use std::libc::{STDIN_FILENO, c_int, fdopen, fgets, fileno, fopen, fstat};
use std::libc::{stat, strlen};
use std::ptr::null;
use std::unstable::intrinsics::init;
use std::vec::{reverse};
use extra::sort::quick_sort3;
static LINE_LEN: uint = 80;
static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ];
static TABLE_SIZE: uint = 2 << 16;
static OCCURRENCES: [&'static str, ..5] = [
"GGT",
"GGTA",
"GGTATT",
"GGTATTTTAATT",
"GGTATTTTAATTTATAGT",
];
// Code implementation
#[deriving(Eq, Ord)]
struct Code(u64);
impl Code {
fn hash(&self) -> u64 {
**self
}
#[inline(always)]
fn push_char(&self, c: u8) -> Code {
Code((**self << 2) + (pack_symbol(c) as u64))
}
fn rotate(&self, c: u8, frame: i32) -> Code {
Code(*self.push_char(c) & ((1u64 << (2 * (frame as u64))) - 1))
}
fn pack(string: &str) -> Code {
let mut code = Code(0u64);
for uint::range(0, string.len()) |i| {
code = code.push_char(string[i]);
}
code
}
// XXX: Inefficient.
fn unpack(&self, frame: i32) -> ~str {
let mut key = **self;
let mut result = ~[];
for (frame as uint).times {
result.push(unpack_symbol((key as u8) & 3));
key >>= 2;
}
reverse(result);
str::from_bytes(result)
}
}
// Hash table implementation
trait TableCallback {
fn f(&self, entry: &mut Entry);
}
struct BumpCallback;
impl TableCallback for BumpCallback {
fn f(&self, entry: &mut Entry) {
entry.count += 1;
}
}
struct PrintCallback(&'static str);
impl TableCallback for PrintCallback {
fn f(&self, entry: &mut Entry) {
println(fmt!("%d\t%s", entry.count as int, **self));
}
}
struct Entry {
code: Code,
count: i32,
next: Option<~Entry>,
}
struct Table {
count: i32,
    items: [Option<~Entry>, ..TABLE_SIZE]
}
impl Table {
fn new() -> Table {
Table {
count: 0,
            items: [ None, ..TABLE_SIZE ],
}
}
fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) {
match item.next {
None => {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
item.next = Some(entry);
}
Some(ref mut entry) => {
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
}
fn lookup<C:TableCallback>(&mut self, key: Code, c: C) {
let index = *key % (TABLE_SIZE as u64);
{
if self.items[index].is_none() {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
self.items[index] = Some(entry);
return;
}
}
{
let mut entry = &mut *self.items[index].get_mut_ref();
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
fn each(&self, f: &fn(entry: &Entry) -> bool) {
for self.items.each |item| {
match *item {
None => {}
Some(ref item) => {
let mut item: &Entry = *item;
loop {
                    if !f(item) {
return;
}
match item.next {
None => break,
Some(ref next_item) => item = &**next_item,
}
}
}
};
}
}
}
// Main program
fn pack_symbol(c: u8) -> u8 {
match c {
'a' as u8 | 'A' as u8 => 0,
'c' as u8 | 'C' as u8 => 1,
'g' as u8 | 'G' as u8 => 2,
't' as u8 | 'T' as u8 => 3,
_ => fail!(c.to_str())
}
}
fn unpack_symbol(c: u8) -> u8 {
|
buf = buf.slice(1, buf.len());
if buf.len() == 0 {
break;
}
        if buf[0] != (' ' as u8) && buf[0] != ('\t' as u8) &&
                buf[0] != ('\n' as u8) && buf[0] != 0 {
break;
}
}
buf
}
#[inline(never)]
fn read_stdin() -> ~[u8] {
unsafe {
let mode = "r";
//let stdin = fdopen(STDIN_FILENO as c_int, transmute(&mode[0]));
let path = "knucleotide-input.txt";
let stdin = fopen(transmute(&path[0]), transmute(&mode[0]));
let mut st: stat = init();
fstat(fileno(stdin), &mut st);
let mut buf = vec::from_elem(st.st_size as uint, 0);
let header = ">THREE".as_bytes();
{
let mut window: &mut [u8] = buf;
loop {
fgets(transmute(&mut window[0]), LINE_LEN as c_int, stdin);
{
if window.slice(0, 6) == header {
break;
}
}
}
while fgets(transmute(&mut window[0]),
LINE_LEN as c_int,
                    stdin) != null() {
window = window.mut_slice(strlen(transmute(&window[0])) as uint, window.len());
}
}
buf
}
}
#[inline(never)]
#[fixed_stack_segment]
fn generate_frequencies(frequencies: &mut Table,
mut input: &[u8],
frame: i32) {
let mut code = Code(0);
// Pull first frame.
for (frame as uint).times {
code = code.push_char(input[0]);
input = next_char(input);
}
frequencies.lookup(code, BumpCallback);
    while input.len() != 0 && input[0] != ('>' as u8) {
code = code.rotate(input[0], frame);
frequencies.lookup(code, BumpCallback);
input = next_char(input);
}
}
#[inline(never)]
#[fixed_stack_segment]
fn print_frequencies(frequencies: &Table, frame: i32) {
let mut vector = ~[];
for frequencies.each |entry| {
vector.push((entry.code, entry.count));
}
quick_sort3(vector);
let mut total_count = 0;
for vector.each |&(_, count)| {
total_count += count;
}
for vector.each |&(key, count)| {
println(fmt!("%s %.3f",
key.unpack(frame),
(count as float * 100.0) / (total_count as float)));
}
}
fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {
frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))
}
#[fixed_stack_segment]
fn main() {
let input = read_stdin();
let mut frequencies = ~Table::new();
generate_frequencies(frequencies, input, 1);
print_frequencies(frequencies, 1);
*frequencies = Table::new();
generate_frequencies(frequencies, input, 2);
print_frequencies(frequencies, 2);
for range(0, 5) |i| {
let occurrence = OCCURRENCES[i];
*frequencies = Table::new();
generate_frequencies(frequencies,
input,
occurrence.len() as i32);
print_occurrences(frequencies, occurrence);
}
}
|
TABLE[c]
}
fn next_char<'a>(mut buf: &'a [u8]) -> &'a [u8] {
loop {
|
random_line_split
|
shootout-k-nucleotide.rs
|
// xfail-test
extern mod extra;
use std::cast::transmute;
use std::i32::range;
use std::libc::{STDIN_FILENO, c_int, fdopen, fgets, fileno, fopen, fstat};
use std::libc::{stat, strlen};
use std::ptr::null;
use std::unstable::intrinsics::init;
use std::vec::{reverse};
use extra::sort::quick_sort3;
static LINE_LEN: uint = 80;
static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ];
static TABLE_SIZE: uint = 2 << 16;
static OCCURRENCES: [&'static str, ..5] = [
"GGT",
"GGTA",
"GGTATT",
"GGTATTTTAATT",
"GGTATTTTAATTTATAGT",
];
// Code implementation
#[deriving(Eq, Ord)]
struct Code(u64);
impl Code {
fn hash(&self) -> u64 {
**self
}
#[inline(always)]
fn push_char(&self, c: u8) -> Code {
Code((**self << 2) + (pack_symbol(c) as u64))
}
fn rotate(&self, c: u8, frame: i32) -> Code {
Code(*self.push_char(c) & ((1u64 << (2 * (frame as u64))) - 1))
}
fn pack(string: &str) -> Code {
let mut code = Code(0u64);
for uint::range(0, string.len()) |i| {
code = code.push_char(string[i]);
}
code
}
// XXX: Inefficient.
fn unpack(&self, frame: i32) -> ~str
|
reverse(result);
str::from_bytes(result)
}
}
// Hash table implementation
trait TableCallback {
fn f(&self, entry: &mut Entry);
}
struct BumpCallback;
impl TableCallback for BumpCallback {
fn f(&self, entry: &mut Entry) {
entry.count += 1;
}
}
struct PrintCallback(&'static str);
impl TableCallback for PrintCallback {
fn f(&self, entry: &mut Entry) {
println(fmt!("%d\t%s", entry.count as int, **self));
}
}
struct Entry {
code: Code,
count: i32,
next: Option<~Entry>,
}
struct Table {
count: i32,
    items: [Option<~Entry>, ..TABLE_SIZE]
}
impl Table {
fn new() -> Table {
Table {
count: 0,
            items: [ None, ..TABLE_SIZE ],
}
}
fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) {
match item.next {
None => {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
item.next = Some(entry);
}
Some(ref mut entry) => {
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
}
fn lookup<C:TableCallback>(&mut self, key: Code, c: C) {
let index = *key % (TABLE_SIZE as u64);
{
if self.items[index].is_none() {
let mut entry = ~Entry {
code: key,
count: 0,
next: None,
};
c.f(entry);
self.items[index] = Some(entry);
return;
}
}
{
let mut entry = &mut *self.items[index].get_mut_ref();
if entry.code == key {
c.f(*entry);
return;
}
Table::search_remainder(*entry, key, c)
}
}
fn each(&self, f: &fn(entry: &Entry) -> bool) {
for self.items.each |item| {
match *item {
None => {}
Some(ref item) => {
let mut item: &Entry = *item;
loop {
                    if !f(item) {
return;
}
match item.next {
None => break,
Some(ref next_item) => item = &**next_item,
}
}
}
};
}
}
}
// Main program
fn pack_symbol(c: u8) -> u8 {
match c {
'a' as u8 | 'A' as u8 => 0,
'c' as u8 | 'C' as u8 => 1,
'g' as u8 | 'G' as u8 => 2,
't' as u8 | 'T' as u8 => 3,
_ => fail!(c.to_str())
}
}
fn unpack_symbol(c: u8) -> u8 {
TABLE[c]
}
fn next_char<'a>(mut buf: &'a [u8]) -> &'a [u8] {
loop {
buf = buf.slice(1, buf.len());
if buf.len() == 0 {
break;
}
        if buf[0] != (' ' as u8) && buf[0] != ('\t' as u8) &&
                buf[0] != ('\n' as u8) && buf[0] != 0 {
break;
}
}
buf
}
#[inline(never)]
fn read_stdin() -> ~[u8] {
unsafe {
let mode = "r";
//let stdin = fdopen(STDIN_FILENO as c_int, transmute(&mode[0]));
let path = "knucleotide-input.txt";
let stdin = fopen(transmute(&path[0]), transmute(&mode[0]));
let mut st: stat = init();
fstat(fileno(stdin), &mut st);
let mut buf = vec::from_elem(st.st_size as uint, 0);
let header = ">THREE".as_bytes();
{
let mut window: &mut [u8] = buf;
loop {
fgets(transmute(&mut window[0]), LINE_LEN as c_int, stdin);
{
if window.slice(0, 6) == header {
break;
}
}
}
while fgets(transmute(&mut window[0]),
LINE_LEN as c_int,
                    stdin) != null() {
window = window.mut_slice(strlen(transmute(&window[0])) as uint, window.len());
}
}
buf
}
}
#[inline(never)]
#[fixed_stack_segment]
fn generate_frequencies(frequencies: &mut Table,
mut input: &[u8],
frame: i32) {
let mut code = Code(0);
// Pull first frame.
for (frame as uint).times {
code = code.push_char(input[0]);
input = next_char(input);
}
frequencies.lookup(code, BumpCallback);
    while input.len() != 0 && input[0] != ('>' as u8) {
code = code.rotate(input[0], frame);
frequencies.lookup(code, BumpCallback);
input = next_char(input);
}
}
#[inline(never)]
#[fixed_stack_segment]
fn print_frequencies(frequencies: &Table, frame: i32) {
let mut vector = ~[];
for frequencies.each |entry| {
vector.push((entry.code, entry.count));
}
quick_sort3(vector);
let mut total_count = 0;
for vector.each |&(_, count)| {
total_count += count;
}
for vector.each |&(key, count)| {
println(fmt!("%s %.3f",
key.unpack(frame),
(count as float * 100.0) / (total_count as float)));
}
}
fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {
frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))
}
#[fixed_stack_segment]
fn main() {
let input = read_stdin();
let mut frequencies = ~Table::new();
generate_frequencies(frequencies, input, 1);
print_frequencies(frequencies, 1);
*frequencies = Table::new();
generate_frequencies(frequencies, input, 2);
print_frequencies(frequencies, 2);
for range(0, 5) |i| {
let occurrence = OCCURRENCES[i];
*frequencies = Table::new();
generate_frequencies(frequencies,
input,
occurrence.len() as i32);
print_occurrences(frequencies, occurrence);
}
}
|
{
let mut key = **self;
let mut result = ~[];
for (frame as uint).times {
result.push(unpack_symbol((key as u8) & 3));
key >>= 2;
}
|
identifier_body
|
lib.rs
|
// DO NOT EDIT!
// This file was generated automatically from'src/mako/api/lib.rs.mako'
// DO NOT EDIT!
//! This documentation was generated from *Shopping Content* crate version *0.1.8+20150528*, where *20150528* is the exact revision of the *content:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v0.1.8*.
//!
//! Everything else about the *Shopping Content* *v2* API can be found at the
//! [official documentation site](https://developers.google.com/shopping-content/v2/).
//! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/master/gen/content2).
//! # Features
//!
//! Handle the following *Resources* with ease from the central [hub](struct.ShoppingContent.html)...
//!
//! * [accounts](struct.Account.html)
//! * [*authinfo*](struct.AccountAuthinfoCall.html), [*custombatch*](struct.AccountCustombatchCall.html), [*delete*](struct.AccountDeleteCall.html), [*get*](struct.AccountGetCall.html), [*insert*](struct.AccountInsertCall.html), [*list*](struct.AccountListCall.html), [*patch*](struct.AccountPatchCall.html) and [*update*](struct.AccountUpdateCall.html)
//! * accountshipping
//! * [*custombatch*](struct.AccountshippingCustombatchCall.html), [*get*](struct.AccountshippingGetCall.html), [*list*](struct.AccountshippingListCall.html), [*patch*](struct.AccountshippingPatchCall.html) and [*update*](struct.AccountshippingUpdateCall.html)
//! * accountstatuses
//! * [*custombatch*](struct.AccountstatuseCustombatchCall.html), [*get*](struct.AccountstatuseGetCall.html) and [*list*](struct.AccountstatuseListCall.html)
//! * accounttax
//! * [*custombatch*](struct.AccounttaxCustombatchCall.html), [*get*](struct.AccounttaxGetCall.html), [*list*](struct.AccounttaxListCall.html), [*patch*](struct.AccounttaxPatchCall.html) and [*update*](struct.AccounttaxUpdateCall.html)
//! * [datafeeds](struct.Datafeed.html)
//! * [*custombatch*](struct.DatafeedCustombatchCall.html), [*delete*](struct.DatafeedDeleteCall.html), [*get*](struct.DatafeedGetCall.html), [*insert*](struct.DatafeedInsertCall.html), [*list*](struct.DatafeedListCall.html), [*patch*](struct.DatafeedPatchCall.html) and [*update*](struct.DatafeedUpdateCall.html)
//! * datafeedstatuses
//! * [*custombatch*](struct.DatafeedstatuseCustombatchCall.html), [*get*](struct.DatafeedstatuseGetCall.html) and [*list*](struct.DatafeedstatuseListCall.html)
//! * [inventory](struct.Inventory.html)
//! * [*custombatch*](struct.InventoryCustombatchCall.html) and [*set*](struct.InventorySetCall.html)
//! * [products](struct.Product.html)
//! * [*custombatch*](struct.ProductCustombatchCall.html), [*delete*](struct.ProductDeleteCall.html), [*get*](struct.ProductGetCall.html), [*insert*](struct.ProductInsertCall.html) and [*list*](struct.ProductListCall.html)
//! * productstatuses
//! * [*custombatch*](struct.ProductstatuseCustombatchCall.html), [*get*](struct.ProductstatuseGetCall.html) and [*list*](struct.ProductstatuseListCall.html)
//!
//!
//!
//!
//! Not what you are looking for? Find all other Google APIs in their Rust [documentation index](../index.html).
//!
//! # Structure of this Library
//!
//! The API is structured into the following primary items:
//!
//! * **[Hub](struct.ShoppingContent.html)**
//! * a central object to maintain state and allow accessing all *Activities*
//! * creates [*Method Builders*](trait.MethodsBuilder.html) which in turn
//! allow access to individual [*Call Builders*](trait.CallBuilder.html)
//! * **[Resources](trait.Resource.html)**
//! * primary types that you can apply *Activities* to
//! * a collection of properties and *Parts*
//! * **[Parts](trait.Part.html)**
//! * a collection of properties
//! * never directly used in *Activities*
//! * **[Activities](trait.CallBuilder.html)**
//! * operations to apply to *Resources*
//!
//! All *structures* are marked with applicable traits to further categorize them and ease browsing.
//!
//! Generally speaking, you can invoke *Activities* like this:
//!
//! ```Rust,ignore
//! let r = hub.resource().activity(...).doit()
//! ```
//!
//! Or specifically...
//!
|
//! let r = hub.accounts().get(...).doit()
//! let r = hub.accounts().update(...).doit()
//! let r = hub.accounts().list(...).doit()
//! let r = hub.accounts().authinfo(...).doit()
//! let r = hub.accounts().delete(...).doit()
//! let r = hub.accounts().insert(...).doit()
//! ```
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [built up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-content2 = "*"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate yup_oauth2 as oauth2;
//! extern crate google_content2 as content2;
//! use content2::Account;
//! use content2::{Result, Error};
//! # #[test] fn egal() {
//! use std::default::Default;
//! use oauth2::{Authenticator, DefaultAuthenticatorDelegate, ApplicationSecret, MemoryStorage};
//! use content2::ShoppingContent;
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = Authenticator::new(&secret, DefaultAuthenticatorDelegate,
//! hyper::Client::new(),
//! <MemoryStorage as Default>::default(), None);
//! let mut hub = ShoppingContent::new(hyper::Client::new(), auth);
//! // As the method needs a request, you would usually fill it with the desired information
//! // into the respective structure. Some of the parts shown here might not be applicable!
//! // Values shown here are possibly random and not representative!
//! let mut req = Account::default();
//!
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative!
//! let result = hub.accounts().patch(req, "merchantId", "accountId")
//! .doit();
//!
//! match result {
//! Err(e) => match e {
//! // The Error enum provides details about what exactly happened.
//! // You can also just use its `Debug`, `Display` or `Error` traits
//! Error::HttpError(_)
//! |Error::MissingAPIKey
//! |Error::MissingToken(_)
//! |Error::Cancelled
//! |Error::UploadSizeLimitExceeded(_, _)
//! |Error::Failure(_)
//! |Error::BadRequest(_)
//! |Error::FieldClash(_)
//! |Error::JsonDecodeError(_, _) => println!("{}", e),
//! },
//! Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as [Result](enum.Result.html) enumeration as return value of
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](trait.Delegate.html), or the [Authenticator Delegate](../yup-oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, the response body, which is part of the [Result](enum.Result.html), should be
//! read by you to obtain the media.
//! If such a method also supports a [Response Result](trait.ResponseResult.html), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way an `doit()` method is called by providing a [delegate](trait.Delegate.html) to the
//! [Method Builder](trait.CallBuilder.html) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](trait.Delegate.html) is default-implemented, allowing you to customize it with minimal effort.
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](trait.RequestValue.html) and
//! [decodable](trait.ResponseResult.html) via *json*. Optionals are used to indicate that partial requests or responses
//! are valid.
//! Most optionals are considered [Parts](trait.Part.html) which are identifiable by name, which will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](trait.CallBuilder.html), you are able to prepare an action call by repeatedly calling its methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are handed by copy
//! * strings are passed as `&str`
//! * [request values](trait.RequestValue.html) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original life times.
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!
//!
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut....
#![allow(unused_imports, unused_mut, dead_code)]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
|
//! ```ignore
//! let r = hub.accounts().custombatch(...).doit()
//! let r = hub.accounts().patch(...).doit()
|
random_line_split
|
many_single_char_names.rs
|
#![allow(clippy::too_many_arguments, clippy::diverging_sub_expression)]
#![warn(clippy::many_single_char_names)]
fn bla() {
let a: i32;
let (b, c, d): (i32, i64, i16);
{
{
let cdefg: i32;
let blar: i32;
}
{
let e: i32;
}
{
let e: i32;
let f: i32;
}
match 5 {
1 => println!(),
e => panic!(),
}
match 5 {
1 => println!(),
_ => panic!(),
}
}
}
fn
|
(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, g: i32, h: i32) {}
fn bindings2() {
let (a, b, c, d, e, f, g, h): (bool, bool, bool, bool, bool, bool, bool, bool) = unimplemented!();
}
fn shadowing() {
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
{
let a = 0i32;
}
}
fn patterns() {
enum Z {
A(i32),
B(i32),
C(i32),
D(i32),
E(i32),
F(i32),
}
// These should not trigger a warning, since the pattern bindings are a new scope.
match Z::A(0) {
Z::A(a) => {},
Z::B(b) => {},
Z::C(c) => {},
Z::D(d) => {},
Z::E(e) => {},
Z::F(f) => {},
}
}
#[allow(clippy::many_single_char_names)]
fn issue_3198_allow_works() {
let (a, b, c, d, e) = (0, 0, 0, 0, 0);
}
fn main() {}
|
bindings
|
identifier_name
|
many_single_char_names.rs
|
#![allow(clippy::too_many_arguments, clippy::diverging_sub_expression)]
#![warn(clippy::many_single_char_names)]
fn bla() {
let a: i32;
let (b, c, d): (i32, i64, i16);
{
{
let cdefg: i32;
let blar: i32;
}
{
let e: i32;
}
{
let e: i32;
let f: i32;
}
match 5 {
1 => println!(),
e => panic!(),
}
match 5 {
1 => println!(),
_ => panic!(),
|
}
}
}
fn bindings(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, g: i32, h: i32) {}
fn bindings2() {
let (a, b, c, d, e, f, g, h): (bool, bool, bool, bool, bool, bool, bool, bool) = unimplemented!();
}
fn shadowing() {
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
{
let a = 0i32;
}
}
fn patterns() {
enum Z {
A(i32),
B(i32),
C(i32),
D(i32),
E(i32),
F(i32),
}
// These should not trigger a warning, since the pattern bindings are a new scope.
match Z::A(0) {
Z::A(a) => {},
Z::B(b) => {},
Z::C(c) => {},
Z::D(d) => {},
Z::E(e) => {},
Z::F(f) => {},
}
}
#[allow(clippy::many_single_char_names)]
fn issue_3198_allow_works() {
let (a, b, c, d, e) = (0, 0, 0, 0, 0);
}
fn main() {}
|
random_line_split
|
|
many_single_char_names.rs
|
#![allow(clippy::too_many_arguments, clippy::diverging_sub_expression)]
#![warn(clippy::many_single_char_names)]
fn bla() {
let a: i32;
let (b, c, d): (i32, i64, i16);
{
{
let cdefg: i32;
let blar: i32;
}
{
let e: i32;
}
{
let e: i32;
let f: i32;
}
match 5 {
1 => println!(),
e => panic!(),
}
match 5 {
1 => println!(),
_ => panic!(),
}
}
}
fn bindings(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, g: i32, h: i32) {}
fn bindings2()
|
fn shadowing() {
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
let a = 0i32;
{
let a = 0i32;
}
}
fn patterns() {
enum Z {
A(i32),
B(i32),
C(i32),
D(i32),
E(i32),
F(i32),
}
// These should not trigger a warning, since the pattern bindings are a new scope.
match Z::A(0) {
Z::A(a) => {},
Z::B(b) => {},
Z::C(c) => {},
Z::D(d) => {},
Z::E(e) => {},
Z::F(f) => {},
}
}
#[allow(clippy::many_single_char_names)]
fn issue_3198_allow_works() {
let (a, b, c, d, e) = (0, 0, 0, 0, 0);
}
fn main() {}
|
{
let (a, b, c, d, e, f, g, h): (bool, bool, bool, bool, bool, bool, bool, bool) = unimplemented!();
}
|
identifier_body
|
plugin.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::RefCell;
use std::collections::BTreeMap;
use ast;
use ast::{Ident, Name, TokenTree};
use codemap::Span;
use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
use util::small_vector::SmallVector;
thread_local! {
static REGISTERED_DIAGNOSTICS: RefCell<BTreeMap<Name, Option<Name>>> = {
RefCell::new(BTreeMap::new())
}
}
thread_local! {
static USED_DIAGNOSTICS: RefCell<BTreeMap<Name, Span>> = {
RefCell::new(BTreeMap::new())
}
}
fn with_registered_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Option<Name>>) -> T,
{
REGISTERED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
fn with_used_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Span>) -> T,
{
USED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
[ast::TtToken(_, token::Ident(code, _))] => code,
_ => unreachable!()
};
with_used_diagnostics(|diagnostics| {
match diagnostics.insert(code.name, span) {
Some(previous_span) => {
ecx.span_warn(span, &format!(
"diagnostic code {} already used", &token::get_ident(code)
));
ecx.span_note(previous_span, "previous invocation");
},
None => ()
}
()
});
with_registered_diagnostics(|diagnostics| {
        if !diagnostics.contains_key(&code.name) {
ecx.span_err(span, &format!(
"used diagnostic code {} not registered", &token::get_ident(code)
));
}
});
MacEager::expr(quote_expr!(ecx, ()))
}
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TtToken(_, token::Ident(ref code, _))] => {
(code, None)
},
[ast::TtToken(_, token::Ident(ref code, _)),
ast::TtToken(_, token::Comma),
ast::TtToken(_, token::Literal(token::StrRaw(description, _), None))] => {
(code, Some(description))
}
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
if diagnostics.insert(code.name, description).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", &token::get_ident(*code)
));
}
});
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + &token::get_ident(*code)
)));
MacEager::items(SmallVector::many(vec![quote_item!(ecx, mod $sym {}).unwrap()]))
}
pub fn
|
<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TtToken(_, token::Ident(ref name, _))] => name,
_ => unreachable!()
};
let (count, expr) =
with_registered_diagnostics(|diagnostics| {
let descriptions: Vec<P<ast::Expr>> =
diagnostics.iter().filter_map(|(code, description)| {
description.map(|description| {
ecx.expr_tuple(span, vec![
ecx.expr_str(span, token::get_name(*code)),
ecx.expr_str(span, token::get_name(description))])
})
}).collect();
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
MacEager::items(SmallVector::many(vec![quote_item!(ecx,
pub static $name: [(&'static str, &'static str); $count] = $expr;
).unwrap()]))
}
|
expand_build_diagnostic_array
|
identifier_name
|
plugin.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::RefCell;
use std::collections::BTreeMap;
use ast;
use ast::{Ident, Name, TokenTree};
use codemap::Span;
use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
use util::small_vector::SmallVector;
thread_local! {
static REGISTERED_DIAGNOSTICS: RefCell<BTreeMap<Name, Option<Name>>> = {
RefCell::new(BTreeMap::new())
}
}
thread_local! {
static USED_DIAGNOSTICS: RefCell<BTreeMap<Name, Span>> = {
RefCell::new(BTreeMap::new())
}
}
fn with_registered_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Option<Name>>) -> T,
{
REGISTERED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
fn with_used_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Span>) -> T,
{
USED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx>
|
"used diagnostic code {} not registered", &token::get_ident(code)
));
}
});
MacEager::expr(quote_expr!(ecx, ()))
}
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TtToken(_, token::Ident(ref code, _))] => {
(code, None)
},
[ast::TtToken(_, token::Ident(ref code, _)),
ast::TtToken(_, token::Comma),
ast::TtToken(_, token::Literal(token::StrRaw(description, _), None))] => {
(code, Some(description))
}
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
if diagnostics.insert(code.name, description).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", &token::get_ident(*code)
));
}
});
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + &token::get_ident(*code)
)));
MacEager::items(SmallVector::many(vec![quote_item!(ecx, mod $sym {}).unwrap()]))
}
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TtToken(_, token::Ident(ref name, _))] => name,
_ => unreachable!()
};
let (count, expr) =
with_registered_diagnostics(|diagnostics| {
let descriptions: Vec<P<ast::Expr>> =
diagnostics.iter().filter_map(|(code, description)| {
description.map(|description| {
ecx.expr_tuple(span, vec![
ecx.expr_str(span, token::get_name(*code)),
ecx.expr_str(span, token::get_name(description))])
})
}).collect();
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
MacEager::items(SmallVector::many(vec![quote_item!(ecx,
pub static $name: [(&'static str, &'static str); $count] = $expr;
).unwrap()]))
}
|
{
let code = match token_tree {
[ast::TtToken(_, token::Ident(code, _))] => code,
_ => unreachable!()
};
with_used_diagnostics(|diagnostics| {
match diagnostics.insert(code.name, span) {
Some(previous_span) => {
ecx.span_warn(span, &format!(
"diagnostic code {} already used", &token::get_ident(code)
));
ecx.span_note(previous_span, "previous invocation");
},
None => ()
}
()
});
with_registered_diagnostics(|diagnostics| {
if !diagnostics.contains_key(&code.name) {
ecx.span_err(span, &format!(
|
identifier_body
|
plugin.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::RefCell;
use std::collections::BTreeMap;
use ast;
use ast::{Ident, Name, TokenTree};
use codemap::Span;
use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
use util::small_vector::SmallVector;
thread_local! {
static REGISTERED_DIAGNOSTICS: RefCell<BTreeMap<Name, Option<Name>>> = {
RefCell::new(BTreeMap::new())
}
}
thread_local! {
static USED_DIAGNOSTICS: RefCell<BTreeMap<Name, Span>> = {
RefCell::new(BTreeMap::new())
}
}
fn with_registered_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Option<Name>>) -> T,
{
REGISTERED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
fn with_used_diagnostics<T, F>(f: F) -> T where
F: FnOnce(&mut BTreeMap<Name, Span>) -> T,
{
USED_DIAGNOSTICS.with(move |slot| {
f(&mut *slot.borrow_mut())
})
}
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
[ast::TtToken(_, token::Ident(code, _))] => code,
_ => unreachable!()
};
with_used_diagnostics(|diagnostics| {
match diagnostics.insert(code.name, span) {
Some(previous_span) => {
ecx.span_warn(span, &format!(
"diagnostic code {} already used", &token::get_ident(code)
));
ecx.span_note(previous_span, "previous invocation");
},
None => ()
}
()
});
with_registered_diagnostics(|diagnostics| {
        if !diagnostics.contains_key(&code.name) {
ecx.span_err(span, &format!(
"used diagnostic code {} not registered", &token::get_ident(code)
));
}
});
MacEager::expr(quote_expr!(ecx, ()))
}
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TtToken(_, token::Ident(ref code, _))] => {
(code, None)
},
[ast::TtToken(_, token::Ident(ref code, _)),
ast::TtToken(_, token::Comma),
ast::TtToken(_, token::Literal(token::StrRaw(description, _), None))] => {
(code, Some(description))
}
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
if diagnostics.insert(code.name, description).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", &token::get_ident(*code)
));
}
});
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + &token::get_ident(*code)
)));
|
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TtToken(_, token::Ident(ref name, _))] => name,
_ => unreachable!()
};
let (count, expr) =
with_registered_diagnostics(|diagnostics| {
let descriptions: Vec<P<ast::Expr>> =
diagnostics.iter().filter_map(|(code, description)| {
description.map(|description| {
ecx.expr_tuple(span, vec![
ecx.expr_str(span, token::get_name(*code)),
ecx.expr_str(span, token::get_name(description))])
})
}).collect();
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
MacEager::items(SmallVector::many(vec![quote_item!(ecx,
pub static $name: [(&'static str, &'static str); $count] = $expr;
).unwrap()]))
}
|
MacEager::items(SmallVector::many(vec![quote_item!(ecx, mod $sym {}).unwrap()]))
}
|
random_line_split
|
multi.rs
|
//! # Multiple agent types
//! Simple example of multiple types of agents.
//! You can use an enum, where each variant is a different type of agent.
//! Here we're just using fields on enum variants for state, but you could also define separate
//! state structs that you wrap enum variants around.
extern crate djinn;
extern crate redis;
extern crate redis_cluster;
extern crate rustc_serialize;
use redis::Client;
use djinn::{Agent, Manager, Simulation, Population, Updates, Redis, run};
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Cat {
purrs: usize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Person {
health: isize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum State {
Person(Person),
Cat(Cat),
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct World {}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum Update {
ChangeHealth(isize),
Purr,
}
#[derive(Clone)]
struct MultiSim;
impl Simulation for MultiSim {
type State = State;
type Update = Update;
type World = World;
fn decide<R: Redis>(&self,
agent: &Agent<Self::State>,
world: &Self::World,
population: &Population<Self, R>,
updates: &mut Updates<Self>)
-> () {
match agent.state {
State::Person(..) => {
updates.queue(agent.id, Update::ChangeHealth(-1));
}
State::Cat(..) => updates.queue(agent.id, Update::Purr),
}
}
fn update(&self, mut state: &mut Self::State, updates: Vec<Self::Update>) -> bool
|
}
}
updated
}
}
fn main() {
let sim = MultiSim {};
let world = World {};
// Setup the manager
let addr = "redis://127.0.0.1/";
let pop_client = Client::open(addr).unwrap();
let mut manager = Manager::new(addr, pop_client, sim.clone());
// Spawn the population
manager.spawns(vec![State::Person(Person { health: 100 }), State::Cat(Cat { purrs: 0 })]);
manager = run(sim, world, manager, 4, 10);
}
|
{
let updated = updates.len() > 0;
for update in updates {
match *state {
State::Cat(ref mut cat) => {
match update {
Update::Purr => {
cat.purrs += 1;
}
_ => (),
}
}
State::Person(ref mut person) => {
match update {
Update::ChangeHealth(change) => {
person.health += change;
}
_ => (),
}
}
|
identifier_body
|
multi.rs
|
//! # Multiple agent types
//! Simple example of multiple types of agents.
//! You can use an enum, where each variant is a different type of agent.
//! Here we're just using fields on enum variants for state, but you could also define separate
//! state structs that you wrap enum variants around.
extern crate djinn;
extern crate redis;
extern crate redis_cluster;
extern crate rustc_serialize;
use redis::Client;
use djinn::{Agent, Manager, Simulation, Population, Updates, Redis, run};
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Cat {
purrs: usize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Person {
health: isize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum State {
Person(Person),
Cat(Cat),
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct World {}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum Update {
ChangeHealth(isize),
Purr,
}
#[derive(Clone)]
struct MultiSim;
impl Simulation for MultiSim {
type State = State;
type Update = Update;
type World = World;
fn decide<R: Redis>(&self,
agent: &Agent<Self::State>,
world: &Self::World,
population: &Population<Self, R>,
updates: &mut Updates<Self>)
-> () {
match agent.state {
State::Person(..) => {
updates.queue(agent.id, Update::ChangeHealth(-1));
}
State::Cat(..) => updates.queue(agent.id, Update::Purr),
}
}
fn
|
(&self, mut state: &mut Self::State, updates: Vec<Self::Update>) -> bool {
let updated = updates.len() > 0;
for update in updates {
match *state {
State::Cat(ref mut cat) => {
match update {
Update::Purr => {
cat.purrs += 1;
}
_ => (),
}
}
State::Person(ref mut person) => {
match update {
Update::ChangeHealth(change) => {
person.health += change;
}
_ => (),
}
}
}
}
updated
}
}
fn main() {
let sim = MultiSim {};
let world = World {};
// Setup the manager
let addr = "redis://127.0.0.1/";
let pop_client = Client::open(addr).unwrap();
let mut manager = Manager::new(addr, pop_client, sim.clone());
// Spawn the population
manager.spawns(vec![State::Person(Person { health: 100 }), State::Cat(Cat { purrs: 0 })]);
manager = run(sim, world, manager, 4, 10);
}
|
update
|
identifier_name
|
multi.rs
|
//! # Multiple agent types
//! Simple example of multiple types of agents.
//! You can use an enum, where each variant is a different type of agent.
//! Here we're just using fields on enum variants for state, but you could also define separate
//! state structs that you wrap enum variants around.
extern crate djinn;
extern crate redis;
extern crate redis_cluster;
extern crate rustc_serialize;
use redis::Client;
use djinn::{Agent, Manager, Simulation, Population, Updates, Redis, run};
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Cat {
purrs: usize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct Person {
health: isize,
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum State {
Person(Person),
Cat(Cat),
}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
struct World {}
#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]
enum Update {
ChangeHealth(isize),
Purr,
}
#[derive(Clone)]
struct MultiSim;
|
fn decide<R: Redis>(&self,
agent: &Agent<Self::State>,
world: &Self::World,
population: &Population<Self, R>,
updates: &mut Updates<Self>)
-> () {
match agent.state {
State::Person(..) => {
updates.queue(agent.id, Update::ChangeHealth(-1));
}
State::Cat(..) => updates.queue(agent.id, Update::Purr),
}
}
fn update(&self, mut state: &mut Self::State, updates: Vec<Self::Update>) -> bool {
let updated = updates.len() > 0;
for update in updates {
match *state {
State::Cat(ref mut cat) => {
match update {
Update::Purr => {
cat.purrs += 1;
}
_ => (),
}
}
State::Person(ref mut person) => {
match update {
Update::ChangeHealth(change) => {
person.health += change;
}
_ => (),
}
}
}
}
updated
}
}
fn main() {
let sim = MultiSim {};
let world = World {};
// Setup the manager
let addr = "redis://127.0.0.1/";
let pop_client = Client::open(addr).unwrap();
let mut manager = Manager::new(addr, pop_client, sim.clone());
// Spawn the population
manager.spawns(vec![State::Person(Person { health: 100 }), State::Cat(Cat { purrs: 0 })]);
manager = run(sim, world, manager, 4, 10);
}
|
impl Simulation for MultiSim {
type State = State;
type Update = Update;
type World = World;
|
random_line_split
|
strategies.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
use futures::{future, Future, Sink, Stream};
use futures::future::Loop;
use libc;
use std::error;
use std::fmt::{self, Display, Formatter};
use std::io::{Error, ErrorKind};
use auth::client::Authenticator;
use auth::commands::{ClientCommand, ServerCommand, ServerGuid};
#[derive(Debug)]
pub enum AuthError {
Io(Error),
Rejected { supported_mechanisms: Vec<Vec<u8>> },
}
impl Display for AuthError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result
|
}
impl error::Error for AuthError {
fn description(&self) -> &str {
match *self {
AuthError::Io(ref err) => err.description(),
            AuthError::Rejected { .. } => "D-Bus authentication rejected",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
AuthError::Io(ref err) => Some(err),
            AuthError::Rejected { .. } => None,
}
}
}
impl From<AuthError> for Error {
fn from(err: AuthError) -> Self {
match err {
AuthError::Io(e) => e,
            AuthError::Rejected { .. } => {
Error::new(ErrorKind::PermissionDenied, "D-Bus authentication rejected")
}
}
}
}
impl From<Error> for AuthError {
fn from(err: Error) -> Self {
AuthError::Io(err)
}
}
pub fn auth_external
(auth: Authenticator)
-> impl Future<Item = (ServerGuid, Authenticator), Error = (AuthError, Option<Authenticator>)> {
let uid_str = unsafe { libc::getuid().to_string() };
let initial_cmd = ClientCommand::Auth {
mechanism: b"EXTERNAL"[..].into(),
initial_response: Some(uid_str.into_bytes().into()),
};
future::loop_fn((auth, initial_cmd), |(auth, cmd)| {
auth.send(cmd)
.map_err(|err| (err.into(), None))
.and_then(|auth| auth.into_future()
.map_err(|(err, auth)| (err.into(), Some(auth))))
.and_then(|(response, auth)| {
match response {
Some(ServerCommand::Ok { server_guid }) => Ok(Loop::Break((server_guid, auth))),
Some(ServerCommand::Rejected { supported_mechanisms }) => {
Err((AuthError::Rejected { supported_mechanisms: supported_mechanisms },
Some(auth)))
}
Some(ServerCommand::Error) => Ok(Loop::Continue((auth, ClientCommand::Cancel))),
Some(_) => Ok(Loop::Continue((auth, ClientCommand::Error(None)))),
None => {
Err((Error::new(ErrorKind::UnexpectedEof,
"unexpected EOF during authentication")
.into(),
Some(auth)))
}
}
})
})
}
|
{
match *self {
AuthError::Io(ref err) => err.fmt(f),
AuthError::Rejected { .. } => {
write!(f,
"Authentication attempt was rejected by the D-Bus server.")
}
}
}
|
identifier_body
|
strategies.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
use futures::{future, Future, Sink, Stream};
use futures::future::Loop;
use libc;
use std::error;
use std::fmt::{self, Display, Formatter};
use std::io::{Error, ErrorKind};
use auth::client::Authenticator;
use auth::commands::{ClientCommand, ServerCommand, ServerGuid};
#[derive(Debug)]
pub enum AuthError {
Io(Error),
Rejected { supported_mechanisms: Vec<Vec<u8>> },
}
impl Display for AuthError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
AuthError::Io(ref err) => err.fmt(f),
            AuthError::Rejected { .. } => {
write!(f,
"Authentication attempt was rejected by the D-Bus server.")
}
}
}
}
impl error::Error for AuthError {
fn description(&self) -> &str {
match *self {
AuthError::Io(ref err) => err.description(),
            AuthError::Rejected { .. } => "D-Bus authentication rejected",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
AuthError::Io(ref err) => Some(err),
            AuthError::Rejected { .. } => None,
}
}
}
impl From<AuthError> for Error {
fn
|
(err: AuthError) -> Self {
match err {
AuthError::Io(e) => e,
            AuthError::Rejected { .. } => {
Error::new(ErrorKind::PermissionDenied, "D-Bus authentication rejected")
}
}
}
}
impl From<Error> for AuthError {
fn from(err: Error) -> Self {
AuthError::Io(err)
}
}
pub fn auth_external
(auth: Authenticator)
-> impl Future<Item = (ServerGuid, Authenticator), Error = (AuthError, Option<Authenticator>)> {
let uid_str = unsafe { libc::getuid().to_string() };
let initial_cmd = ClientCommand::Auth {
mechanism: b"EXTERNAL"[..].into(),
initial_response: Some(uid_str.into_bytes().into()),
};
future::loop_fn((auth, initial_cmd), |(auth, cmd)| {
auth.send(cmd)
.map_err(|err| (err.into(), None))
.and_then(|auth| auth.into_future()
.map_err(|(err, auth)| (err.into(), Some(auth))))
.and_then(|(response, auth)| {
match response {
Some(ServerCommand::Ok { server_guid }) => Ok(Loop::Break((server_guid, auth))),
Some(ServerCommand::Rejected { supported_mechanisms }) => {
Err((AuthError::Rejected { supported_mechanisms: supported_mechanisms },
Some(auth)))
}
Some(ServerCommand::Error) => Ok(Loop::Continue((auth, ClientCommand::Cancel))),
Some(_) => Ok(Loop::Continue((auth, ClientCommand::Error(None)))),
None => {
Err((Error::new(ErrorKind::UnexpectedEof,
"unexpected EOF during authentication")
.into(),
Some(auth)))
}
}
})
})
}
|
from
|
identifier_name
|
strategies.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
use futures::{future, Future, Sink, Stream};
use futures::future::Loop;
use libc;
use std::error;
use std::fmt::{self, Display, Formatter};
use std::io::{Error, ErrorKind};
use auth::client::Authenticator;
use auth::commands::{ClientCommand, ServerCommand, ServerGuid};
#[derive(Debug)]
pub enum AuthError {
Io(Error),
Rejected { supported_mechanisms: Vec<Vec<u8>> },
}
impl Display for AuthError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
AuthError::Io(ref err) => err.fmt(f),
            AuthError::Rejected { .. } => {
write!(f,
"Authentication attempt was rejected by the D-Bus server.")
}
}
}
}
impl error::Error for AuthError {
fn description(&self) -> &str {
match *self {
AuthError::Io(ref err) => err.description(),
            AuthError::Rejected { .. } => "D-Bus authentication rejected",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
AuthError::Io(ref err) => Some(err),
            AuthError::Rejected { .. } => None,
}
}
}
impl From<AuthError> for Error {
fn from(err: AuthError) -> Self {
match err {
AuthError::Io(e) => e,
            AuthError::Rejected { .. } => {
Error::new(ErrorKind::PermissionDenied, "D-Bus authentication rejected")
}
}
}
}
impl From<Error> for AuthError {
fn from(err: Error) -> Self {
AuthError::Io(err)
}
}
pub fn auth_external
(auth: Authenticator)
-> impl Future<Item = (ServerGuid, Authenticator), Error = (AuthError, Option<Authenticator>)> {
let uid_str = unsafe { libc::getuid().to_string() };
let initial_cmd = ClientCommand::Auth {
mechanism: b"EXTERNAL"[..].into(),
initial_response: Some(uid_str.into_bytes().into()),
};
future::loop_fn((auth, initial_cmd), |(auth, cmd)| {
auth.send(cmd)
.map_err(|err| (err.into(), None))
.and_then(|auth| auth.into_future()
.map_err(|(err, auth)| (err.into(), Some(auth))))
.and_then(|(response, auth)| {
match response {
Some(ServerCommand::Ok { server_guid }) => Ok(Loop::Break((server_guid, auth))),
Some(ServerCommand::Rejected { supported_mechanisms }) =>
|
Some(ServerCommand::Error) => Ok(Loop::Continue((auth, ClientCommand::Cancel))),
Some(_) => Ok(Loop::Continue((auth, ClientCommand::Error(None)))),
None => {
Err((Error::new(ErrorKind::UnexpectedEof,
"unexpected EOF during authentication")
.into(),
Some(auth)))
}
}
})
})
}
|
{
Err((AuthError::Rejected { supported_mechanisms: supported_mechanisms },
Some(auth)))
}
|
conditional_block
|
strategies.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
use futures::{future, Future, Sink, Stream};
use futures::future::Loop;
use libc;
use std::error;
use std::fmt::{self, Display, Formatter};
use std::io::{Error, ErrorKind};
use auth::client::Authenticator;
use auth::commands::{ClientCommand, ServerCommand, ServerGuid};
#[derive(Debug)]
pub enum AuthError {
Io(Error),
Rejected { supported_mechanisms: Vec<Vec<u8>> },
}
impl Display for AuthError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
AuthError::Io(ref err) => err.fmt(f),
            AuthError::Rejected { .. } => {
write!(f,
"Authentication attempt was rejected by the D-Bus server.")
}
}
}
}
impl error::Error for AuthError {
fn description(&self) -> &str {
match *self {
AuthError::Io(ref err) => err.description(),
            AuthError::Rejected { .. } => "D-Bus authentication rejected",
|
fn cause(&self) -> Option<&error::Error> {
match *self {
AuthError::Io(ref err) => Some(err),
            AuthError::Rejected { .. } => None,
}
}
}
impl From<AuthError> for Error {
fn from(err: AuthError) -> Self {
match err {
AuthError::Io(e) => e,
            AuthError::Rejected { .. } => {
Error::new(ErrorKind::PermissionDenied, "D-Bus authentication rejected")
}
}
}
}
impl From<Error> for AuthError {
fn from(err: Error) -> Self {
AuthError::Io(err)
}
}
pub fn auth_external
(auth: Authenticator)
-> impl Future<Item = (ServerGuid, Authenticator), Error = (AuthError, Option<Authenticator>)> {
let uid_str = unsafe { libc::getuid().to_string() };
let initial_cmd = ClientCommand::Auth {
mechanism: b"EXTERNAL"[..].into(),
initial_response: Some(uid_str.into_bytes().into()),
};
future::loop_fn((auth, initial_cmd), |(auth, cmd)| {
auth.send(cmd)
.map_err(|err| (err.into(), None))
.and_then(|auth| auth.into_future()
.map_err(|(err, auth)| (err.into(), Some(auth))))
.and_then(|(response, auth)| {
match response {
Some(ServerCommand::Ok { server_guid }) => Ok(Loop::Break((server_guid, auth))),
Some(ServerCommand::Rejected { supported_mechanisms }) => {
Err((AuthError::Rejected { supported_mechanisms: supported_mechanisms },
Some(auth)))
}
Some(ServerCommand::Error) => Ok(Loop::Continue((auth, ClientCommand::Cancel))),
Some(_) => Ok(Loop::Continue((auth, ClientCommand::Error(None)))),
None => {
Err((Error::new(ErrorKind::UnexpectedEof,
"unexpected EOF during authentication")
.into(),
Some(auth)))
}
}
})
})
}
|
}
}
|
random_line_split
|
extern-with-type-bounds.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
extern "rust-intrinsic" {
// Real example from libcore
    fn type_id<T: ?Sized + 'static>() -> u64;
// Silent bounds made explicit to make sure they are actually
// resolved.
fn transmute<T: Sized, U: Sized>(val: T) -> U;
// Bounds aren't checked right now, so this should work
// even though it's incorrect.
fn size_of<T: Clone>() -> usize;
// Unresolved bounds should still error.
fn align_of<T: NoSuchTrait>() -> usize;
//~^ ERROR cannot find trait `NoSuchTrait` in this scope
}
fn main()
|
{}
|
identifier_body
|
|
extern-with-type-bounds.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
extern "rust-intrinsic" {
// Real example from libcore
    fn type_id<T: ?Sized + 'static>() -> u64;
// Silent bounds made explicit to make sure they are actually
// resolved.
fn transmute<T: Sized, U: Sized>(val: T) -> U;
// Bounds aren't checked right now, so this should work
// even though it's incorrect.
fn size_of<T: Clone>() -> usize;
// Unresolved bounds should still error.
fn align_of<T: NoSuchTrait>() -> usize;
//~^ ERROR cannot find trait `NoSuchTrait` in this scope
}
fn main() {}
|
//
|
random_line_split
|
extern-with-type-bounds.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
extern "rust-intrinsic" {
// Real example from libcore
    fn type_id<T: ?Sized + 'static>() -> u64;
// Silent bounds made explicit to make sure they are actually
// resolved.
fn transmute<T: Sized, U: Sized>(val: T) -> U;
// Bounds aren't checked right now, so this should work
// even though it's incorrect.
fn size_of<T: Clone>() -> usize;
// Unresolved bounds should still error.
fn align_of<T: NoSuchTrait>() -> usize;
//~^ ERROR cannot find trait `NoSuchTrait` in this scope
}
fn
|
() {}
|
main
|
identifier_name
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unused_imports)]
#![deny(unused_variables)]
#![feature(box_syntax)]
#![feature(convert)]
// For FFI
#![allow(non_snake_case, dead_code)]
//! The `servo` test application.
//!
//! Creates a `Browser` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in lib.rs.
//!
//! [glutin]: https://github.com/tomaka/glutin
extern crate servo;
extern crate time;
extern crate util;
extern crate errno;
extern crate compositing;
extern crate script_traits;
extern crate euclid;
extern crate libc;
extern crate msg;
extern crate gleam;
extern crate layers;
extern crate egl;
extern crate url;
extern crate net;
extern crate env_logger;
#[link(name = "stlport")]
extern {}
use util::opts;
use net::resource_task;
use servo::Browser;
use compositing::windowing::WindowEvent;
use std::env;
mod window;
mod input;
struct BrowserWrapper {
browser: Browser,
}
fn main() {
env_logger::init().unwrap();
// Parse the command line options and store them globally
if opts::from_cmdline_args(env::args().collect::<Vec<_>>().as_slice()) {
resource_task::global_init();
let window = if opts::get().headless {
None
} else {
Some(window::Window::new())
};
// Our wrapper around `Browser` that also implements some
// callbacks required by the glutin window implementation.
let mut browser = BrowserWrapper {
browser: Browser::new(window.clone()),
};
match window {
None => (),
Some(ref window) => input::run_input_loop(&window.event_send)
}
browser.browser.handle_events(vec![WindowEvent::InitializeCompositing]);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = match window {
None => browser.browser.handle_events(vec![WindowEvent::Idle]),
Some(ref window) => {
let events = window.wait_events();
browser.browser.handle_events(events)
}
};
            if !should_continue {
break
}
}
let BrowserWrapper {
browser
} = browser;
browser.shutdown();
}
|
}
|
random_line_split
|
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unused_imports)]
#![deny(unused_variables)]
#![feature(box_syntax)]
#![feature(convert)]
// For FFI
#![allow(non_snake_case, dead_code)]
//! The `servo` test application.
//!
//! Creates a `Browser` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in lib.rs.
//!
//! [glutin]: https://github.com/tomaka/glutin
extern crate servo;
extern crate time;
extern crate util;
extern crate errno;
extern crate compositing;
extern crate script_traits;
extern crate euclid;
extern crate libc;
extern crate msg;
extern crate gleam;
extern crate layers;
extern crate egl;
extern crate url;
extern crate net;
extern crate env_logger;
#[link(name = "stlport")]
extern {}
use util::opts;
use net::resource_task;
use servo::Browser;
use compositing::windowing::WindowEvent;
use std::env;
mod window;
mod input;
struct BrowserWrapper {
browser: Browser,
}
fn
|
() {
env_logger::init().unwrap();
// Parse the command line options and store them globally
if opts::from_cmdline_args(env::args().collect::<Vec<_>>().as_slice()) {
resource_task::global_init();
let window = if opts::get().headless {
None
} else {
Some(window::Window::new())
};
// Our wrapper around `Browser` that also implements some
// callbacks required by the glutin window implementation.
let mut browser = BrowserWrapper {
browser: Browser::new(window.clone()),
};
match window {
None => (),
Some(ref window) => input::run_input_loop(&window.event_send)
}
browser.browser.handle_events(vec![WindowEvent::InitializeCompositing]);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = match window {
None => browser.browser.handle_events(vec![WindowEvent::Idle]),
Some(ref window) => {
let events = window.wait_events();
browser.browser.handle_events(events)
}
};
            if !should_continue {
break
}
}
let BrowserWrapper {
browser
} = browser;
browser.shutdown();
}
}
|
main
|
identifier_name
|
project.rs
|
// Examples to illustrate project loading
extern crate r2api;
extern crate r2pipe;
extern crate radeco_lib;
use r2api::api_trait::R2Api;
use r2pipe::R2;
use radeco_lib::frontend::radeco_containers::{FunctionLoader, ModuleLoader, ProjectLoader};
use radeco_lib::frontend::radeco_source::Source;
use std::cell::RefCell;
use std::rc::Rc;
fn main() {
{
let mut r2 = R2::new(Some("/bin/ls")).expect("Failed to load r2");
r2.analyze();
let src: Rc<Source> = Rc::new(Rc::new(RefCell::new(r2)));
let p = ProjectLoader::default()
.path("/bin/ls")
.source(Rc::clone(&src))
.module_loader(
ModuleLoader::default()
.parallel()
.build_ssa()
.build_callgraph()
.load_datarefs()
.function_loader(FunctionLoader::default().include_defaults()),
)
.load();
for m in p.iter() {
for rfn in m.module.iter() {
println!("{:#X}", rfn.function.0);
}
|
}
}
}
|
random_line_split
|
|
project.rs
|
// Examples to illustrate project loading
extern crate r2api;
extern crate r2pipe;
extern crate radeco_lib;
use r2api::api_trait::R2Api;
use r2pipe::R2;
use radeco_lib::frontend::radeco_containers::{FunctionLoader, ModuleLoader, ProjectLoader};
use radeco_lib::frontend::radeco_source::Source;
use std::cell::RefCell;
use std::rc::Rc;
fn
|
() {
{
let mut r2 = R2::new(Some("/bin/ls")).expect("Failed to load r2");
r2.analyze();
let src: Rc<Source> = Rc::new(Rc::new(RefCell::new(r2)));
let p = ProjectLoader::default()
.path("/bin/ls")
.source(Rc::clone(&src))
.module_loader(
ModuleLoader::default()
.parallel()
.build_ssa()
.build_callgraph()
.load_datarefs()
.function_loader(FunctionLoader::default().include_defaults()),
)
.load();
for m in p.iter() {
for rfn in m.module.iter() {
println!("{:#X}", rfn.function.0);
}
}
}
}
|
main
|
identifier_name
|
project.rs
|
// Examples to illustrate project loading
extern crate r2api;
extern crate r2pipe;
extern crate radeco_lib;
use r2api::api_trait::R2Api;
use r2pipe::R2;
use radeco_lib::frontend::radeco_containers::{FunctionLoader, ModuleLoader, ProjectLoader};
use radeco_lib::frontend::radeco_source::Source;
use std::cell::RefCell;
use std::rc::Rc;
fn main()
|
println!("{:#X}", rfn.function.0);
}
}
}
}
|
{
{
let mut r2 = R2::new(Some("/bin/ls")).expect("Failed to load r2");
r2.analyze();
let src: Rc<Source> = Rc::new(Rc::new(RefCell::new(r2)));
let p = ProjectLoader::default()
.path("/bin/ls")
.source(Rc::clone(&src))
.module_loader(
ModuleLoader::default()
.parallel()
.build_ssa()
.build_callgraph()
.load_datarefs()
.function_loader(FunctionLoader::default().include_defaults()),
)
.load();
for m in p.iter() {
for rfn in m.module.iter() {
|
identifier_body
|
macro_example.rs
|
#[macro_use] extern crate nickel;
extern crate regex;
extern crate rustc_serialize;
extern crate hyper;
use std::io::Write;
use nickel::status::StatusCode::{self, NotFound};
use nickel::{
Nickel, NickelError, Continue, Halt, Request, Response, MediaType,
QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action
};
use regex::Regex;
use hyper::header::Location;
#[derive(RustcDecodable, RustcEncodable)]
struct Person {
firstname: String,
lastname: String,
}
//this is an example middleware function that just logs each request
fn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> {
println!("logging request: {:?}", request.origin.uri);
Ok(Continue(response))
}
//this is how to overwrite the default error handler to handle 404 cases with a custom view
fn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action {
if let Some(ref mut res) = err.stream {
if res.status() == NotFound {
let _ = res.write_all(b"<h1>Call the police!</h1>");
return Halt(())
}
}
Continue(())
}
fn main() {
let mut server = Nickel::new();
// middleware is optional and can be registered with `utilize`
server.utilize(logger);
// go to http://localhost:6767/thoughtram_logo_brain.png to see static file serving in action
server.utilize(StaticFilesHandler::new("examples/assets/"));
let hello_regex = Regex::new("/hello/(?P<name>[a-zA-Z]+)").unwrap();
// The return type for a route can be anything that implements `Responder`
server.utilize(router!(
// go to http://localhost:6767/user/4711 to see this route in action
get "/user/:userid" => |request| {
// returning a String
format!("This is user: {}", request.param("userid").unwrap())
}
// go to http://localhost:6767/no_alloc/4711 to see this route in action
get "/no_alloc/:userid" => |request, response| {
// returning a slice of T where T: Display
&["This is user: ", request.param("userid").unwrap()][..]
}
// go to http://localhost:6767/bar to see this route in action
get "/bar" => {
// returning a http status code and a static string
(200u16, "This is the /bar handler")
}
// go to http://localhost:6767/content-type to see this route in action
get "/content-type" => |_, mut response| {
response.set(MediaType::Json);
"{'foo':'bar'}"
}
// go to http://localhost:6767/hello/moomah to see this route in action
get hello_regex => |request| {
format!("Hello {}", request.param("name").unwrap())
}
// go to http://localhost:6767/redirect to see this route in action
get "/redirect" => |_, mut response| {
response.set(Location("http://nickel.rs".into()));
StatusCode::PermanentRedirect
}
// go to http://localhost:6767/private to see this route in action
get "/private" => {
// returning a typed http status and a response body
(StatusCode::Unauthorized, "This is a private place")
}
// go to http://localhost:6767/some/crazy/route to see this route in action
get "/some/*/route" => {
// returning a static string
"This matches /some/crazy/route but not /some/super/crazy/route"
}
// go to http://localhost:6767/a/some/crazy/route to see this route in action
get "/a/**/route" => {
"This matches /a/crazy/route and also /a/super/crazy/route"
}
// try it with curl
// curl 'http://localhost:6767/a/post/request' -H 'Content-Type: application/json;charset=UTF-8' --data-binary $'{ "firstname": "John","lastname": "Connor" }'
post "/a/post/request" => |request| {
let person = request.json_as::<Person>().unwrap();
format!("Hello {} {}", person.firstname, person.lastname)
}
// try calling http://localhost:6767/query?foo=bar
get "/query" => |request| {
let query = request.query();
let foo = query.get("foo").unwrap_or("This is only a default value");
let bar = query.get("bar").unwrap_or("This is only a default value");
let text = format!("<p>Your foo values in the query string are: {:?}\
<p>Your bar values are: {:?}",
foo, bar);
text
}
));
|
server.handle_error(custom_handler);
println!("Running server!");
server.listen("127.0.0.1:6767");
}
|
// issue #20178
let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;
|
random_line_split
|
macro_example.rs
|
#[macro_use] extern crate nickel;
extern crate regex;
extern crate rustc_serialize;
extern crate hyper;
use std::io::Write;
use nickel::status::StatusCode::{self, NotFound};
use nickel::{
Nickel, NickelError, Continue, Halt, Request, Response, MediaType,
QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action
};
use regex::Regex;
use hyper::header::Location;
#[derive(RustcDecodable, RustcEncodable)]
struct Person {
firstname: String,
lastname: String,
}
//this is an example middleware function that just logs each request
fn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> {
println!("logging request: {:?}", request.origin.uri);
Ok(Continue(response))
}
//this is how to overwrite the default error handler to handle 404 cases with a custom view
fn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action {
if let Some(ref mut res) = err.stream {
if res.status() == NotFound
|
}
Continue(())
}
fn main() {
let mut server = Nickel::new();
// middleware is optional and can be registered with `utilize`
server.utilize(logger);
// go to http://localhost:6767/thoughtram_logo_brain.png to see static file serving in action
server.utilize(StaticFilesHandler::new("examples/assets/"));
let hello_regex = Regex::new("/hello/(?P<name>[a-zA-Z]+)").unwrap();
// The return type for a route can be anything that implements `Responder`
server.utilize(router!(
// go to http://localhost:6767/user/4711 to see this route in action
get "/user/:userid" => |request| {
// returning a String
format!("This is user: {}", request.param("userid").unwrap())
}
// go to http://localhost:6767/no_alloc/4711 to see this route in action
get "/no_alloc/:userid" => |request, response| {
// returning a slice of T where T: Display
&["This is user: ", request.param("userid").unwrap()][..]
}
// go to http://localhost:6767/bar to see this route in action
get "/bar" => {
// returning a http status code and a static string
(200u16, "This is the /bar handler")
}
// go to http://localhost:6767/content-type to see this route in action
get "/content-type" => |_, mut response| {
response.set(MediaType::Json);
"{'foo':'bar'}"
}
// go to http://localhost:6767/hello/moomah to see this route in action
get hello_regex => |request| {
format!("Hello {}", request.param("name").unwrap())
}
// go to http://localhost:6767/redirect to see this route in action
get "/redirect" => |_, mut response| {
response.set(Location("http://nickel.rs".into()));
StatusCode::PermanentRedirect
}
// go to http://localhost:6767/private to see this route in action
get "/private" => {
// returning a typed http status and a response body
(StatusCode::Unauthorized, "This is a private place")
}
// go to http://localhost:6767/some/crazy/route to see this route in action
get "/some/*/route" => {
// returning a static string
"This matches /some/crazy/route but not /some/super/crazy/route"
}
// go to http://localhost:6767/a/some/crazy/route to see this route in action
get "/a/**/route" => {
"This matches /a/crazy/route and also /a/super/crazy/route"
}
// try it with curl
// curl 'http://localhost:6767/a/post/request' -H 'Content-Type: application/json;charset=UTF-8' --data-binary $'{ "firstname": "John","lastname": "Connor" }'
post "/a/post/request" => |request| {
let person = request.json_as::<Person>().unwrap();
format!("Hello {} {}", person.firstname, person.lastname)
}
// try calling http://localhost:6767/query?foo=bar
get "/query" => |request| {
let query = request.query();
let foo = query.get("foo").unwrap_or("This is only a default value");
let bar = query.get("bar").unwrap_or("This is only a default value");
let text = format!("<p>Your foo values in the query string are: {:?}\
<p>Your bar values are: {:?}",
foo, bar);
text
}
));
// issue #20178
let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;
server.handle_error(custom_handler);
println!("Running server!");
server.listen("127.0.0.1:6767");
}
|
{
let _ = res.write_all(b"<h1>Call the police!</h1>");
return Halt(())
}
|
conditional_block
|
macro_example.rs
|
#[macro_use] extern crate nickel;
extern crate regex;
extern crate rustc_serialize;
extern crate hyper;
use std::io::Write;
use nickel::status::StatusCode::{self, NotFound};
use nickel::{
Nickel, NickelError, Continue, Halt, Request, Response, MediaType,
QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action
};
use regex::Regex;
use hyper::header::Location;
#[derive(RustcDecodable, RustcEncodable)]
struct Person {
firstname: String,
lastname: String,
}
//this is an example middleware function that just logs each request
fn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a>
|
//this is how to overwrite the default error handler to handle 404 cases with a custom view
fn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action {
if let Some(ref mut res) = err.stream {
if res.status() == NotFound {
let _ = res.write_all(b"<h1>Call the police!</h1>");
return Halt(())
}
}
Continue(())
}
fn main() {
let mut server = Nickel::new();
// middleware is optional and can be registered with `utilize`
server.utilize(logger);
// go to http://localhost:6767/thoughtram_logo_brain.png to see static file serving in action
server.utilize(StaticFilesHandler::new("examples/assets/"));
let hello_regex = Regex::new("/hello/(?P<name>[a-zA-Z]+)").unwrap();
// The return type for a route can be anything that implements `Responder`
server.utilize(router!(
// go to http://localhost:6767/user/4711 to see this route in action
get "/user/:userid" => |request| {
// returning a String
format!("This is user: {}", request.param("userid").unwrap())
}
// go to http://localhost:6767/no_alloc/4711 to see this route in action
get "/no_alloc/:userid" => |request, response| {
// returning a slice of T where T: Display
&["This is user: ", request.param("userid").unwrap()][..]
}
// go to http://localhost:6767/bar to see this route in action
get "/bar" => {
// returning a http status code and a static string
(200u16, "This is the /bar handler")
}
// go to http://localhost:6767/content-type to see this route in action
get "/content-type" => |_, mut response| {
response.set(MediaType::Json);
"{'foo':'bar'}"
}
// go to http://localhost:6767/hello/moomah to see this route in action
get hello_regex => |request| {
format!("Hello {}", request.param("name").unwrap())
}
// go to http://localhost:6767/redirect to see this route in action
get "/redirect" => |_, mut response| {
response.set(Location("http://nickel.rs".into()));
StatusCode::PermanentRedirect
}
// go to http://localhost:6767/private to see this route in action
get "/private" => {
// returning a typed http status and a response body
(StatusCode::Unauthorized, "This is a private place")
}
// go to http://localhost:6767/some/crazy/route to see this route in action
get "/some/*/route" => {
// returning a static string
"This matches /some/crazy/route but not /some/super/crazy/route"
}
// go to http://localhost:6767/a/some/crazy/route to see this route in action
get "/a/**/route" => {
"This matches /a/crazy/route and also /a/super/crazy/route"
}
// try it with curl
// curl 'http://localhost:6767/a/post/request' -H 'Content-Type: application/json;charset=UTF-8' --data-binary $'{ "firstname": "John","lastname": "Connor" }'
post "/a/post/request" => |request| {
let person = request.json_as::<Person>().unwrap();
format!("Hello {} {}", person.firstname, person.lastname)
}
// try calling http://localhost:6767/query?foo=bar
get "/query" => |request| {
let query = request.query();
let foo = query.get("foo").unwrap_or("This is only a default value");
let bar = query.get("bar").unwrap_or("This is only a default value");
let text = format!("<p>Your foo values in the query string are: {:?}\
<p>Your bar values are: {:?}",
foo, bar);
text
}
));
// issue #20178
let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;
server.handle_error(custom_handler);
println!("Running server!");
server.listen("127.0.0.1:6767");
}
|
{
println!("logging request: {:?}", request.origin.uri);
Ok(Continue(response))
}
|
identifier_body
|
macro_example.rs
|
#[macro_use] extern crate nickel;
extern crate regex;
extern crate rustc_serialize;
extern crate hyper;
use std::io::Write;
use nickel::status::StatusCode::{self, NotFound};
use nickel::{
Nickel, NickelError, Continue, Halt, Request, Response, MediaType,
QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action
};
use regex::Regex;
use hyper::header::Location;
#[derive(RustcDecodable, RustcEncodable)]
struct Person {
firstname: String,
lastname: String,
}
//this is an example middleware function that just logs each request
fn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> {
println!("logging request: {:?}", request.origin.uri);
Ok(Continue(response))
}
//this is how to overwrite the default error handler to handle 404 cases with a custom view
fn
|
<'a>(err: &mut NickelError, _req: &mut Request) -> Action {
if let Some(ref mut res) = err.stream {
if res.status() == NotFound {
let _ = res.write_all(b"<h1>Call the police!</h1>");
return Halt(())
}
}
Continue(())
}
fn main() {
let mut server = Nickel::new();
// middleware is optional and can be registered with `utilize`
server.utilize(logger);
// go to http://localhost:6767/thoughtram_logo_brain.png to see static file serving in action
server.utilize(StaticFilesHandler::new("examples/assets/"));
let hello_regex = Regex::new("/hello/(?P<name>[a-zA-Z]+)").unwrap();
// The return type for a route can be anything that implements `Responder`
server.utilize(router!(
// go to http://localhost:6767/user/4711 to see this route in action
get "/user/:userid" => |request| {
// returning a String
format!("This is user: {}", request.param("userid").unwrap())
}
// go to http://localhost:6767/no_alloc/4711 to see this route in action
get "/no_alloc/:userid" => |request, response| {
// returning a slice of T where T: Display
&["This is user: ", request.param("userid").unwrap()][..]
}
// go to http://localhost:6767/bar to see this route in action
get "/bar" => {
// returning a http status code and a static string
(200u16, "This is the /bar handler")
}
// go to http://localhost:6767/content-type to see this route in action
get "/content-type" => |_, mut response| {
response.set(MediaType::Json);
"{'foo':'bar'}"
}
// go to http://localhost:6767/hello/moomah to see this route in action
get hello_regex => |request| {
format!("Hello {}", request.param("name").unwrap())
}
// go to http://localhost:6767/redirect to see this route in action
get "/redirect" => |_, mut response| {
response.set(Location("http://nickel.rs".into()));
StatusCode::PermanentRedirect
}
// go to http://localhost:6767/private to see this route in action
get "/private" => {
// returning a typed http status and a response body
(StatusCode::Unauthorized, "This is a private place")
}
// go to http://localhost:6767/some/crazy/route to see this route in action
get "/some/*/route" => {
// returning a static string
"This matches /some/crazy/route but not /some/super/crazy/route"
}
// go to http://localhost:6767/a/some/crazy/route to see this route in action
get "/a/**/route" => {
"This matches /a/crazy/route and also /a/super/crazy/route"
}
// try it with curl
// curl 'http://localhost:6767/a/post/request' -H 'Content-Type: application/json;charset=UTF-8' --data-binary $'{ "firstname": "John","lastname": "Connor" }'
post "/a/post/request" => |request| {
let person = request.json_as::<Person>().unwrap();
format!("Hello {} {}", person.firstname, person.lastname)
}
// try calling http://localhost:6767/query?foo=bar
get "/query" => |request| {
let query = request.query();
let foo = query.get("foo").unwrap_or("This is only a default value");
let bar = query.get("bar").unwrap_or("This is only a default value");
let text = format!("<p>Your foo values in the query string are: {:?}\
<p>Your bar values are: {:?}",
foo, bar);
text
}
));
// issue #20178
let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;
server.handle_error(custom_handler);
println!("Running server!");
server.listen("127.0.0.1:6767");
}
|
custom_404
|
identifier_name
|
compound.rs
|
//!
//! Shape composed from the union of primitives.
//!
use crate::bounding_volume::{BoundingVolume, AABB};
use crate::math::Isometry;
use crate::partitioning::{BVHImpl, BVT};
use crate::query::{Contact, ContactKinematic, ContactPrediction, ContactPreprocessor};
use crate::shape::{CompositeShape, FeatureId, Shape, ShapeHandle};
use na::{self, RealField};
use std::mem;
/// A compound shape with an aabb bounding volume.
///
/// A compound shape is a shape composed of the union of several simpler shapes. This is
/// the main way of creating a concave shape from convex parts. Each part can have its own
/// delta transformation to shift or rotate it with regard to the other shapes.
#[derive(Clone)]
pub struct Compound<N: RealField> {
shapes: Vec<(Isometry<N>, ShapeHandle<N>)>,
bvt: BVT<usize, AABB<N>>,
bvs: Vec<AABB<N>>,
nbits: usize,
}
impl<N: RealField> Compound<N> {
/// Builds a new compound shape.
pub fn new(shapes: Vec<(Isometry<N>, ShapeHandle<N>)>) -> Compound<N> {
let mut bvs = Vec::new();
let mut leaves = Vec::new();
for (i, &(ref delta, ref shape)) in shapes.iter().enumerate() {
            // loosen for better persistency
let bv = shape.as_ref().aabb(delta).loosened(na::convert(0.04f64));
bvs.push(bv.clone());
leaves.push((i, bv));
if let Some(_comp) = shape.as_composite_shape() {
panic!("Nested composite shapes are not allowed.");
}
}
let nbits = mem::size_of::<usize>() * 8 - leaves.len().leading_zeros() as usize;
let bvt = BVT::new_balanced(leaves);
Compound {
shapes: shapes,
bvt: bvt,
bvs: bvs,
nbits,
}
}
}
impl<N: RealField> Compound<N> {
/// The shapes of this compound shape.
#[inline]
pub fn shapes(&self) -> &[(Isometry<N>, ShapeHandle<N>)] {
&self.shapes[..]
}
/// The optimization structure used by this compound shape.
#[inline]
pub fn bvt(&self) -> &BVT<usize, AABB<N>> {
&self.bvt
}
/// The AABB of this compound in its local-space.
#[inline]
pub fn aabb(&self) -> &AABB<N> {
self.bvt()
.root_bounding_volume()
.expect("An empty Compound has no AABB.")
}
/// The shapes bounding volumes.
#[inline]
pub fn bounding_volumes(&self) -> &[AABB<N>] {
&self.bvs[..]
}
/// The AABB of the i-th shape compositing this compound.
#[inline]
pub fn aabb_at(&self, i: usize) -> &AABB<N> {
&self.bvs[i]
}
/// Transforms a FeatureId of this compound into a pair containing the index of the subshape
/// containing this feature, and the corresponding FeatureId on this subshape.
pub fn subshape_feature_id(&self, fid: FeatureId) -> (usize, FeatureId) {
match fid {
FeatureId::Face(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Face(i >> self.nbits),
),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Edge(i >> self.nbits),
),
FeatureId::Vertex(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Vertex(i >> self.nbits),
),
FeatureId::Unknown => (0, FeatureId::Unknown),
}
}
}
impl<N: RealField> CompositeShape<N> for Compound<N> {
#[inline]
fn nparts(&self) -> usize {
self.shapes.len()
}
#[inline(always)]
fn map_part_at(
&self,
i: usize,
m: &Isometry<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>),
)
|
fn map_part_and_preprocessor_at(
&self,
i: usize,
m: &Isometry<N>,
_prediction: &ContactPrediction<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>, &dyn ContactPreprocessor<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
let proc = CompoundContactProcessor::new(&elt.0, i, self.nbits);
f(&pos, elt.1.as_ref(), &proc)
}
#[inline]
fn aabb_at(&self, i: usize) -> AABB<N> {
self.bounding_volumes()[i].clone()
}
#[inline]
fn bvh(&self) -> BVHImpl<N, usize, AABB<N>> {
BVHImpl::BVT(&self.bvt)
}
}
struct CompoundContactProcessor<'a, N: RealField> {
part_pos: &'a Isometry<N>,
part_id: usize,
nbits: usize,
}
impl<'a, N: RealField> CompoundContactProcessor<'a, N> {
pub fn new(part_pos: &'a Isometry<N>, part_id: usize, nbits: usize) -> Self {
CompoundContactProcessor {
part_pos,
part_id,
nbits,
}
}
}
impl<'a, N: RealField> ContactPreprocessor<N> for CompoundContactProcessor<'a, N> {
fn process_contact(
&self,
_c: &mut Contact<N>,
kinematic: &mut ContactKinematic<N>,
is_first: bool,
) -> bool {
// Fix the feature ID.
let feature = if is_first {
kinematic.feature1()
} else {
kinematic.feature2()
};
let actual_feature = match feature {
FeatureId::Vertex(i) => FeatureId::Vertex((i << self.nbits) | self.part_id),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => FeatureId::Edge((i << self.nbits) | self.part_id),
FeatureId::Face(i) => FeatureId::Face((i << self.nbits) | self.part_id),
FeatureId::Unknown => return false,
};
if is_first {
kinematic.set_feature1(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform1(self.part_pos);
} else {
kinematic.set_feature2(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform2(self.part_pos);
}
true
}
}
|
{
let elt = &self.shapes()[i];
let pos = m * elt.0;
f(&pos, elt.1.as_ref())
}
|
identifier_body
|
compound.rs
|
//!
//! Shape composed from the union of primitives.
//!
use crate::bounding_volume::{BoundingVolume, AABB};
use crate::math::Isometry;
use crate::partitioning::{BVHImpl, BVT};
use crate::query::{Contact, ContactKinematic, ContactPrediction, ContactPreprocessor};
use crate::shape::{CompositeShape, FeatureId, Shape, ShapeHandle};
use na::{self, RealField};
use std::mem;
/// A compound shape with an aabb bounding volume.
///
/// A compound shape is a shape composed of the union of several simpler shapes. This is
/// the main way of creating a concave shape from convex parts. Each part can have its own
/// delta transformation to shift or rotate it with regard to the other shapes.
#[derive(Clone)]
pub struct Compound<N: RealField> {
shapes: Vec<(Isometry<N>, ShapeHandle<N>)>,
bvt: BVT<usize, AABB<N>>,
bvs: Vec<AABB<N>>,
nbits: usize,
}
impl<N: RealField> Compound<N> {
/// Builds a new compound shape.
pub fn new(shapes: Vec<(Isometry<N>, ShapeHandle<N>)>) -> Compound<N> {
let mut bvs = Vec::new();
let mut leaves = Vec::new();
for (i, &(ref delta, ref shape)) in shapes.iter().enumerate() {
            // loosen for better persistency
let bv = shape.as_ref().aabb(delta).loosened(na::convert(0.04f64));
bvs.push(bv.clone());
leaves.push((i, bv));
if let Some(_comp) = shape.as_composite_shape() {
panic!("Nested composite shapes are not allowed.");
}
}
let nbits = mem::size_of::<usize>() * 8 - leaves.len().leading_zeros() as usize;
let bvt = BVT::new_balanced(leaves);
Compound {
shapes: shapes,
bvt: bvt,
bvs: bvs,
nbits,
}
}
}
impl<N: RealField> Compound<N> {
/// The shapes of this compound shape.
#[inline]
pub fn shapes(&self) -> &[(Isometry<N>, ShapeHandle<N>)] {
&self.shapes[..]
}
/// The optimization structure used by this compound shape.
#[inline]
pub fn bvt(&self) -> &BVT<usize, AABB<N>> {
&self.bvt
}
/// The AABB of this compound in its local-space.
#[inline]
pub fn aabb(&self) -> &AABB<N> {
self.bvt()
.root_bounding_volume()
.expect("An empty Compound has no AABB.")
|
/// The shapes bounding volumes.
#[inline]
pub fn bounding_volumes(&self) -> &[AABB<N>] {
&self.bvs[..]
}
/// The AABB of the i-th shape compositing this compound.
#[inline]
pub fn aabb_at(&self, i: usize) -> &AABB<N> {
&self.bvs[i]
}
/// Transforms a FeatureId of this compound into a pair containing the index of the subshape
/// containing this feature, and the corresponding FeatureId on this subshape.
pub fn subshape_feature_id(&self, fid: FeatureId) -> (usize, FeatureId) {
match fid {
FeatureId::Face(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Face(i >> self.nbits),
),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Edge(i >> self.nbits),
),
FeatureId::Vertex(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Vertex(i >> self.nbits),
),
FeatureId::Unknown => (0, FeatureId::Unknown),
}
}
}
impl<N: RealField> CompositeShape<N> for Compound<N> {
#[inline]
fn nparts(&self) -> usize {
self.shapes.len()
}
#[inline(always)]
fn map_part_at(
&self,
i: usize,
m: &Isometry<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
f(&pos, elt.1.as_ref())
}
fn map_part_and_preprocessor_at(
&self,
i: usize,
m: &Isometry<N>,
_prediction: &ContactPrediction<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>, &dyn ContactPreprocessor<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
let proc = CompoundContactProcessor::new(&elt.0, i, self.nbits);
f(&pos, elt.1.as_ref(), &proc)
}
#[inline]
fn aabb_at(&self, i: usize) -> AABB<N> {
self.bounding_volumes()[i].clone()
}
#[inline]
fn bvh(&self) -> BVHImpl<N, usize, AABB<N>> {
BVHImpl::BVT(&self.bvt)
}
}
struct CompoundContactProcessor<'a, N: RealField> {
part_pos: &'a Isometry<N>,
part_id: usize,
nbits: usize,
}
impl<'a, N: RealField> CompoundContactProcessor<'a, N> {
pub fn new(part_pos: &'a Isometry<N>, part_id: usize, nbits: usize) -> Self {
CompoundContactProcessor {
part_pos,
part_id,
nbits,
}
}
}
impl<'a, N: RealField> ContactPreprocessor<N> for CompoundContactProcessor<'a, N> {
fn process_contact(
&self,
_c: &mut Contact<N>,
kinematic: &mut ContactKinematic<N>,
is_first: bool,
) -> bool {
// Fix the feature ID.
let feature = if is_first {
kinematic.feature1()
} else {
kinematic.feature2()
};
let actual_feature = match feature {
FeatureId::Vertex(i) => FeatureId::Vertex((i << self.nbits) | self.part_id),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => FeatureId::Edge((i << self.nbits) | self.part_id),
FeatureId::Face(i) => FeatureId::Face((i << self.nbits) | self.part_id),
FeatureId::Unknown => return false,
};
if is_first {
kinematic.set_feature1(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform1(self.part_pos);
} else {
kinematic.set_feature2(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform2(self.part_pos);
}
true
}
}
|
}
|
random_line_split
|
compound.rs
|
//!
//! Shape composed from the union of primitives.
//!
use crate::bounding_volume::{BoundingVolume, AABB};
use crate::math::Isometry;
use crate::partitioning::{BVHImpl, BVT};
use crate::query::{Contact, ContactKinematic, ContactPrediction, ContactPreprocessor};
use crate::shape::{CompositeShape, FeatureId, Shape, ShapeHandle};
use na::{self, RealField};
use std::mem;
/// A compound shape with an aabb bounding volume.
///
/// A compound shape is a shape composed of the union of several simpler shapes. This is
/// the main way of creating a concave shape from convex parts. Each part can have its own
/// delta transformation to shift or rotate it with regard to the other shapes.
#[derive(Clone)]
pub struct Compound<N: RealField> {
shapes: Vec<(Isometry<N>, ShapeHandle<N>)>,
bvt: BVT<usize, AABB<N>>,
bvs: Vec<AABB<N>>,
nbits: usize,
}
impl<N: RealField> Compound<N> {
/// Builds a new compound shape.
pub fn new(shapes: Vec<(Isometry<N>, ShapeHandle<N>)>) -> Compound<N> {
let mut bvs = Vec::new();
let mut leaves = Vec::new();
for (i, &(ref delta, ref shape)) in shapes.iter().enumerate() {
            // loosen for better persistency
let bv = shape.as_ref().aabb(delta).loosened(na::convert(0.04f64));
bvs.push(bv.clone());
leaves.push((i, bv));
if let Some(_comp) = shape.as_composite_shape() {
panic!("Nested composite shapes are not allowed.");
}
}
let nbits = mem::size_of::<usize>() * 8 - leaves.len().leading_zeros() as usize;
let bvt = BVT::new_balanced(leaves);
Compound {
shapes: shapes,
bvt: bvt,
bvs: bvs,
nbits,
}
}
}
impl<N: RealField> Compound<N> {
/// The shapes of this compound shape.
#[inline]
pub fn shapes(&self) -> &[(Isometry<N>, ShapeHandle<N>)] {
&self.shapes[..]
}
/// The optimization structure used by this compound shape.
#[inline]
pub fn bvt(&self) -> &BVT<usize, AABB<N>> {
&self.bvt
}
/// The AABB of this compound in its local-space.
#[inline]
pub fn aabb(&self) -> &AABB<N> {
self.bvt()
.root_bounding_volume()
.expect("An empty Compound has no AABB.")
}
/// The shapes bounding volumes.
#[inline]
pub fn bounding_volumes(&self) -> &[AABB<N>] {
&self.bvs[..]
}
/// The AABB of the i-th shape compositing this compound.
#[inline]
pub fn aabb_at(&self, i: usize) -> &AABB<N> {
&self.bvs[i]
}
/// Transforms a FeatureId of this compound into a pair containing the index of the subshape
/// containing this feature, and the corresponding FeatureId on this subshape.
pub fn subshape_feature_id(&self, fid: FeatureId) -> (usize, FeatureId) {
match fid {
FeatureId::Face(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Face(i >> self.nbits),
),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Edge(i >> self.nbits),
),
FeatureId::Vertex(i) => (
                (i & !(usize::max_value() << self.nbits)),
FeatureId::Vertex(i >> self.nbits),
),
FeatureId::Unknown => (0, FeatureId::Unknown),
}
}
}
impl<N: RealField> CompositeShape<N> for Compound<N> {
#[inline]
fn nparts(&self) -> usize {
self.shapes.len()
}
#[inline(always)]
fn map_part_at(
&self,
i: usize,
m: &Isometry<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
f(&pos, elt.1.as_ref())
}
fn map_part_and_preprocessor_at(
&self,
i: usize,
m: &Isometry<N>,
_prediction: &ContactPrediction<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>, &dyn ContactPreprocessor<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
let proc = CompoundContactProcessor::new(&elt.0, i, self.nbits);
f(&pos, elt.1.as_ref(), &proc)
}
#[inline]
fn
|
(&self, i: usize) -> AABB<N> {
self.bounding_volumes()[i].clone()
}
#[inline]
fn bvh(&self) -> BVHImpl<N, usize, AABB<N>> {
BVHImpl::BVT(&self.bvt)
}
}
struct CompoundContactProcessor<'a, N: RealField> {
part_pos: &'a Isometry<N>,
part_id: usize,
nbits: usize,
}
impl<'a, N: RealField> CompoundContactProcessor<'a, N> {
pub fn new(part_pos: &'a Isometry<N>, part_id: usize, nbits: usize) -> Self {
CompoundContactProcessor {
part_pos,
part_id,
nbits,
}
}
}
impl<'a, N: RealField> ContactPreprocessor<N> for CompoundContactProcessor<'a, N> {
fn process_contact(
&self,
_c: &mut Contact<N>,
kinematic: &mut ContactKinematic<N>,
is_first: bool,
) -> bool {
// Fix the feature ID.
let feature = if is_first {
kinematic.feature1()
} else {
kinematic.feature2()
};
let actual_feature = match feature {
FeatureId::Vertex(i) => FeatureId::Vertex((i << self.nbits) | self.part_id),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => FeatureId::Edge((i << self.nbits) | self.part_id),
FeatureId::Face(i) => FeatureId::Face((i << self.nbits) | self.part_id),
FeatureId::Unknown => return false,
};
if is_first {
kinematic.set_feature1(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform1(self.part_pos);
} else {
kinematic.set_feature2(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform2(self.part_pos);
}
true
}
}
|
aabb_at
|
identifier_name
|
compound.rs
|
//!
//! Shape composed from the union of primitives.
//!
use crate::bounding_volume::{BoundingVolume, AABB};
use crate::math::Isometry;
use crate::partitioning::{BVHImpl, BVT};
use crate::query::{Contact, ContactKinematic, ContactPrediction, ContactPreprocessor};
use crate::shape::{CompositeShape, FeatureId, Shape, ShapeHandle};
use na::{self, RealField};
use std::mem;
/// A compound shape with an aabb bounding volume.
///
/// A compound shape is a shape composed of the union of several simpler shapes. This is
/// the main way of creating a concave shape from convex parts. Each part can have its own
/// delta transformation to shift or rotate it with regard to the other shapes.
#[derive(Clone)]
pub struct Compound<N: RealField> {
shapes: Vec<(Isometry<N>, ShapeHandle<N>)>,
bvt: BVT<usize, AABB<N>>,
bvs: Vec<AABB<N>>,
nbits: usize,
}
impl<N: RealField> Compound<N> {
/// Builds a new compound shape.
pub fn new(shapes: Vec<(Isometry<N>, ShapeHandle<N>)>) -> Compound<N> {
let mut bvs = Vec::new();
let mut leaves = Vec::new();
for (i, &(ref delta, ref shape)) in shapes.iter().enumerate() {
// loosen for better persistency
let bv = shape.as_ref().aabb(delta).loosened(na::convert(0.04f64));
bvs.push(bv.clone());
leaves.push((i, bv));
if let Some(_comp) = shape.as_composite_shape() {
panic!("Nested composite shapes are not allowed.");
}
}
let nbits = mem::size_of::<usize>() * 8 - leaves.len().leading_zeros() as usize;
let bvt = BVT::new_balanced(leaves);
Compound {
shapes: shapes,
bvt: bvt,
bvs: bvs,
nbits,
}
}
}
impl<N: RealField> Compound<N> {
/// The shapes of this compound shape.
#[inline]
pub fn shapes(&self) -> &[(Isometry<N>, ShapeHandle<N>)] {
&self.shapes[..]
}
/// The optimization structure used by this compound shape.
#[inline]
pub fn bvt(&self) -> &BVT<usize, AABB<N>> {
&self.bvt
}
/// The AABB of this compound in its local-space.
#[inline]
pub fn aabb(&self) -> &AABB<N> {
self.bvt()
.root_bounding_volume()
.expect("An empty Compound has no AABB.")
}
/// The shapes' bounding volumes.
#[inline]
pub fn bounding_volumes(&self) -> &[AABB<N>] {
&self.bvs[..]
}
/// The AABB of the i-th shape composing this compound.
#[inline]
pub fn aabb_at(&self, i: usize) -> &AABB<N> {
&self.bvs[i]
}
/// Transforms a FeatureId of this compound into a pair containing the index of the subshape
/// containing this feature, and the corresponding FeatureId on this subshape.
pub fn subshape_feature_id(&self, fid: FeatureId) -> (usize, FeatureId) {
match fid {
FeatureId::Face(i) => (
(i & !(usize::max_value() << self.nbits)),
FeatureId::Face(i >> self.nbits),
),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => (
(i & !(usize::max_value() << self.nbits)),
FeatureId::Edge(i >> self.nbits),
),
FeatureId::Vertex(i) => (
(i & !(usize::max_value() << self.nbits)),
FeatureId::Vertex(i >> self.nbits),
),
FeatureId::Unknown => (0, FeatureId::Unknown),
}
}
}
impl<N: RealField> CompositeShape<N> for Compound<N> {
#[inline]
fn nparts(&self) -> usize {
self.shapes.len()
}
#[inline(always)]
fn map_part_at(
&self,
i: usize,
m: &Isometry<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
f(&pos, elt.1.as_ref())
}
fn map_part_and_preprocessor_at(
&self,
i: usize,
m: &Isometry<N>,
_prediction: &ContactPrediction<N>,
f: &mut dyn FnMut(&Isometry<N>, &dyn Shape<N>, &dyn ContactPreprocessor<N>),
) {
let elt = &self.shapes()[i];
let pos = m * elt.0;
let proc = CompoundContactProcessor::new(&elt.0, i, self.nbits);
f(&pos, elt.1.as_ref(), &proc)
}
#[inline]
fn aabb_at(&self, i: usize) -> AABB<N> {
self.bounding_volumes()[i].clone()
}
#[inline]
fn bvh(&self) -> BVHImpl<N, usize, AABB<N>> {
BVHImpl::BVT(&self.bvt)
}
}
struct CompoundContactProcessor<'a, N: RealField> {
part_pos: &'a Isometry<N>,
part_id: usize,
nbits: usize,
}
impl<'a, N: RealField> CompoundContactProcessor<'a, N> {
pub fn new(part_pos: &'a Isometry<N>, part_id: usize, nbits: usize) -> Self {
CompoundContactProcessor {
part_pos,
part_id,
nbits,
}
}
}
impl<'a, N: RealField> ContactPreprocessor<N> for CompoundContactProcessor<'a, N> {
fn process_contact(
&self,
_c: &mut Contact<N>,
kinematic: &mut ContactKinematic<N>,
is_first: bool,
) -> bool {
// Fix the feature ID.
let feature = if is_first
|
else {
kinematic.feature2()
};
let actual_feature = match feature {
FeatureId::Vertex(i) => FeatureId::Vertex((i << self.nbits) | self.part_id),
#[cfg(feature = "dim3")]
FeatureId::Edge(i) => FeatureId::Edge((i << self.nbits) | self.part_id),
FeatureId::Face(i) => FeatureId::Face((i << self.nbits) | self.part_id),
FeatureId::Unknown => return false,
};
if is_first {
kinematic.set_feature1(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform1(self.part_pos);
} else {
kinematic.set_feature2(actual_feature);
// The contact kinematics must be expressed on the local frame of
// the compound instead of the sub-shape.
kinematic.transform2(self.part_pos);
}
true
}
}
|
{
kinematic.feature1()
}
|
conditional_block
|
windowing.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use embedder_traits::EventLoopWaker;
use euclid::TypedScale;
#[cfg(feature = "gleam")]
use gleam::gl;
use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{TopLevelBrowsingContextId, TraversalDirection};
use script_traits::{MouseButton, TouchEventType, TouchId};
use servo_geometry::{DeviceIndependentPixel, DeviceUintLength};
use servo_url::ServoUrl;
use std::fmt::{Debug, Error, Formatter};
#[cfg(feature = "gleam")]
use std::rc::Rc;
use style_traits::DevicePixel;
use webrender_api::{DeviceIntPoint, DevicePoint, DeviceUintSize, DeviceUintRect, ScrollLocation};
#[derive(Clone)]
pub enum MouseWindowEvent {
Click(MouseButton, DevicePoint),
MouseDown(MouseButton, DevicePoint),
MouseUp(MouseButton, DevicePoint),
}
/// Various debug and profiling flags that WebRender supports.
#[derive(Clone)]
pub enum WebRenderDebugOption {
Profiler,
TextureCacheDebug,
RenderTargetDebug,
}
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent when the window is resized.
Resize,
/// Sent when a new URL is to be loaded.
LoadUrl(TopLevelBrowsingContextId, ServoUrl),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
/// Sent when the mouse moves.
MouseWindowMoveEventClass(DevicePoint),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, DevicePoint),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(ScrollLocation, DeviceIntPoint, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(TopLevelBrowsingContextId, TraversalDirection),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
Keyboard(KeyboardEvent),
/// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload(TopLevelBrowsingContextId),
/// Create a new top level browsing context
NewBrowser(ServoUrl, TopLevelBrowsingContextId),
/// Close a top level browsing context
CloseBrowser(TopLevelBrowsingContextId),
/// Panic a top level browsing context.
SendError(Option<TopLevelBrowsingContextId>, String),
/// Make a top level browsing context visible, hiding the previous
|
ToggleWebRenderDebug(WebRenderDebugOption),
/// Capture current WebRender
CaptureWebRender,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::Resize => write!(f, "Resize"),
WindowEvent::Keyboard(..) => write!(f, "Keyboard"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),
WindowEvent::Reload(..) => write!(f, "Reload"),
WindowEvent::NewBrowser(..) => write!(f, "NewBrowser"),
WindowEvent::SendError(..) => write!(f, "SendError"),
WindowEvent::CloseBrowser(..) => write!(f, "CloseBrowser"),
WindowEvent::SelectBrowser(..) => write!(f, "SelectBrowser"),
WindowEvent::ToggleWebRenderDebug(..) => write!(f, "ToggleWebRenderDebug"),
WindowEvent::CaptureWebRender => write!(f, "CaptureWebRender"),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AnimationState {
Idle,
Animating,
}
pub trait WindowMethods {
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: DeviceUintLength, height: DeviceUintLength) -> bool;
/// Return the GL function pointer trait.
#[cfg(feature = "gleam")]
fn gl(&self) -> Rc<gl::Gl>;
/// Returns a thread-safe object to wake up the window's event loop.
fn create_event_loop_waker(&self) -> Box<EventLoopWaker>;
/// Get the coordinates of the native window, the screen and the framebuffer.
fn get_coordinates(&self) -> EmbedderCoordinates;
/// Set whether the application is currently animating.
/// Typically, when animations are active, the window
/// will want to avoid blocking on UI events, and just
/// run the event loop at the vsync interval.
fn set_animation_state(&self, _state: AnimationState);
}
#[derive(Clone, Copy, Debug)]
pub struct EmbedderCoordinates {
/// The pixel density of the display.
pub hidpi_factor: TypedScale<f32, DeviceIndependentPixel, DevicePixel>,
/// Size of the screen.
pub screen: DeviceUintSize,
/// Size of the available screen space (screen without toolbars and docks).
pub screen_avail: DeviceUintSize,
/// Size of the native window.
pub window: (DeviceUintSize, DeviceIntPoint),
/// Size of the GL buffer in the window.
pub framebuffer: DeviceUintSize,
/// Coordinates of the document within the framebuffer.
pub viewport: DeviceUintRect,
}
|
/// visible one.
SelectBrowser(TopLevelBrowsingContextId),
/// Toggles a debug flag in WebRender
|
random_line_split
|
windowing.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use embedder_traits::EventLoopWaker;
use euclid::TypedScale;
#[cfg(feature = "gleam")]
use gleam::gl;
use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{TopLevelBrowsingContextId, TraversalDirection};
use script_traits::{MouseButton, TouchEventType, TouchId};
use servo_geometry::{DeviceIndependentPixel, DeviceUintLength};
use servo_url::ServoUrl;
use std::fmt::{Debug, Error, Formatter};
#[cfg(feature = "gleam")]
use std::rc::Rc;
use style_traits::DevicePixel;
use webrender_api::{DeviceIntPoint, DevicePoint, DeviceUintSize, DeviceUintRect, ScrollLocation};
#[derive(Clone)]
pub enum MouseWindowEvent {
Click(MouseButton, DevicePoint),
MouseDown(MouseButton, DevicePoint),
MouseUp(MouseButton, DevicePoint),
}
/// Various debug and profiling flags that WebRender supports.
#[derive(Clone)]
pub enum WebRenderDebugOption {
Profiler,
TextureCacheDebug,
RenderTargetDebug,
}
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent when the window is resized.
Resize,
/// Sent when a new URL is to be loaded.
LoadUrl(TopLevelBrowsingContextId, ServoUrl),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
/// Sent when the mouse moves.
MouseWindowMoveEventClass(DevicePoint),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, DevicePoint),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(ScrollLocation, DeviceIntPoint, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(TopLevelBrowsingContextId, TraversalDirection),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
Keyboard(KeyboardEvent),
/// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload(TopLevelBrowsingContextId),
/// Create a new top level browsing context
NewBrowser(ServoUrl, TopLevelBrowsingContextId),
/// Close a top level browsing context
CloseBrowser(TopLevelBrowsingContextId),
/// Panic a top level browsing context.
SendError(Option<TopLevelBrowsingContextId>, String),
/// Make a top level browsing context visible, hiding the previous
/// visible one.
SelectBrowser(TopLevelBrowsingContextId),
/// Toggles a debug flag in WebRender
ToggleWebRenderDebug(WebRenderDebugOption),
/// Capture current WebRender
CaptureWebRender,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error>
|
WindowEvent::SelectBrowser(..) => write!(f, "SelectBrowser"),
WindowEvent::ToggleWebRenderDebug(..) => write!(f, "ToggleWebRenderDebug"),
WindowEvent::CaptureWebRender => write!(f, "CaptureWebRender"),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AnimationState {
Idle,
Animating,
}
pub trait WindowMethods {
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: DeviceUintLength, height: DeviceUintLength) -> bool;
/// Return the GL function pointer trait.
#[cfg(feature = "gleam")]
fn gl(&self) -> Rc<gl::Gl>;
/// Returns a thread-safe object to wake up the window's event loop.
fn create_event_loop_waker(&self) -> Box<EventLoopWaker>;
/// Get the coordinates of the native window, the screen and the framebuffer.
fn get_coordinates(&self) -> EmbedderCoordinates;
/// Set whether the application is currently animating.
/// Typically, when animations are active, the window
/// will want to avoid blocking on UI events, and just
/// run the event loop at the vsync interval.
fn set_animation_state(&self, _state: AnimationState);
}
#[derive(Clone, Copy, Debug)]
pub struct EmbedderCoordinates {
/// The pixel density of the display.
pub hidpi_factor: TypedScale<f32, DeviceIndependentPixel, DevicePixel>,
/// Size of the screen.
pub screen: DeviceUintSize,
/// Size of the available screen space (screen without toolbars and docks).
pub screen_avail: DeviceUintSize,
/// Size of the native window.
pub window: (DeviceUintSize, DeviceIntPoint),
/// Size of the GL buffer in the window.
pub framebuffer: DeviceUintSize,
/// Coordinates of the document within the framebuffer.
pub viewport: DeviceUintRect,
}
|
{
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::Resize => write!(f, "Resize"),
WindowEvent::Keyboard(..) => write!(f, "Keyboard"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),
WindowEvent::Reload(..) => write!(f, "Reload"),
WindowEvent::NewBrowser(..) => write!(f, "NewBrowser"),
WindowEvent::SendError(..) => write!(f, "SendError"),
WindowEvent::CloseBrowser(..) => write!(f, "CloseBrowser"),
|
identifier_body
|
windowing.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use embedder_traits::EventLoopWaker;
use euclid::TypedScale;
#[cfg(feature = "gleam")]
use gleam::gl;
use keyboard_types::KeyboardEvent;
use msg::constellation_msg::{TopLevelBrowsingContextId, TraversalDirection};
use script_traits::{MouseButton, TouchEventType, TouchId};
use servo_geometry::{DeviceIndependentPixel, DeviceUintLength};
use servo_url::ServoUrl;
use std::fmt::{Debug, Error, Formatter};
#[cfg(feature = "gleam")]
use std::rc::Rc;
use style_traits::DevicePixel;
use webrender_api::{DeviceIntPoint, DevicePoint, DeviceUintSize, DeviceUintRect, ScrollLocation};
#[derive(Clone)]
pub enum
|
{
Click(MouseButton, DevicePoint),
MouseDown(MouseButton, DevicePoint),
MouseUp(MouseButton, DevicePoint),
}
/// Various debug and profiling flags that WebRender supports.
#[derive(Clone)]
pub enum WebRenderDebugOption {
Profiler,
TextureCacheDebug,
RenderTargetDebug,
}
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent when the window is resized.
Resize,
/// Sent when a new URL is to be loaded.
LoadUrl(TopLevelBrowsingContextId, ServoUrl),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
/// Sent when the mouse moves.
MouseWindowMoveEventClass(DevicePoint),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, DevicePoint),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(ScrollLocation, DeviceIntPoint, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(TopLevelBrowsingContextId, TraversalDirection),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
Keyboard(KeyboardEvent),
/// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload(TopLevelBrowsingContextId),
/// Create a new top level browsing context
NewBrowser(ServoUrl, TopLevelBrowsingContextId),
/// Close a top level browsing context
CloseBrowser(TopLevelBrowsingContextId),
/// Panic a top level browsing context.
SendError(Option<TopLevelBrowsingContextId>, String),
/// Make a top level browsing context visible, hiding the previous
/// visible one.
SelectBrowser(TopLevelBrowsingContextId),
/// Toggles a debug flag in WebRender
ToggleWebRenderDebug(WebRenderDebugOption),
/// Capture current WebRender
CaptureWebRender,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::Resize => write!(f, "Resize"),
WindowEvent::Keyboard(..) => write!(f, "Keyboard"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),
WindowEvent::Reload(..) => write!(f, "Reload"),
WindowEvent::NewBrowser(..) => write!(f, "NewBrowser"),
WindowEvent::SendError(..) => write!(f, "SendError"),
WindowEvent::CloseBrowser(..) => write!(f, "CloseBrowser"),
WindowEvent::SelectBrowser(..) => write!(f, "SelectBrowser"),
WindowEvent::ToggleWebRenderDebug(..) => write!(f, "ToggleWebRenderDebug"),
WindowEvent::CaptureWebRender => write!(f, "CaptureWebRender"),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AnimationState {
Idle,
Animating,
}
pub trait WindowMethods {
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: DeviceUintLength, height: DeviceUintLength) -> bool;
/// Return the GL function pointer trait.
#[cfg(feature = "gleam")]
fn gl(&self) -> Rc<gl::Gl>;
/// Returns a thread-safe object to wake up the window's event loop.
fn create_event_loop_waker(&self) -> Box<EventLoopWaker>;
/// Get the coordinates of the native window, the screen and the framebuffer.
fn get_coordinates(&self) -> EmbedderCoordinates;
/// Set whether the application is currently animating.
/// Typically, when animations are active, the window
/// will want to avoid blocking on UI events, and just
/// run the event loop at the vsync interval.
fn set_animation_state(&self, _state: AnimationState);
}
#[derive(Clone, Copy, Debug)]
pub struct EmbedderCoordinates {
/// The pixel density of the display.
pub hidpi_factor: TypedScale<f32, DeviceIndependentPixel, DevicePixel>,
/// Size of the screen.
pub screen: DeviceUintSize,
/// Size of the available screen space (screen without toolbars and docks).
pub screen_avail: DeviceUintSize,
/// Size of the native window.
pub window: (DeviceUintSize, DeviceIntPoint),
/// Size of the GL buffer in the window.
pub framebuffer: DeviceUintSize,
/// Coordinates of the document within the framebuffer.
pub viewport: DeviceUintRect,
}
|
MouseWindowEvent
|
identifier_name
|
sendable.rs
|
// Copyright 2015 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License,
// version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which
// licence you accepted on initial access to the Software (the "Licences").
//
// By contributing code to the SAFE Network Software, or to this project generally, you agree to be
// bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the
// Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR.
//
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.
//
// Please review the Licences for the specific language governing permissions and limitations
// relating to use of the SAFE Network Software.
use name_type;
/// This trait is required for any type of message to be
/// passed to routing, refresh / account transfer is optional
/// The name will let routing know it's a NaeManager and the owner will allow routing to hash
/// the requester's ID with this name (by hashing the requester's ID) for put and post messages
pub trait Sendable {
fn name(&self)->name_type::NameType;
fn type_tag(&self)->u64;
fn serialised_contents(&self)->Vec<u8>;
fn owner(&self)->Option<name_type::NameType>
|
fn refresh(&self)->bool; // is this an account transfer type
fn merge(&self, responses: Vec<Box<Sendable>>) -> Option<Box<Sendable>>;
}
|
{ Option::None }
|
identifier_body
|
sendable.rs
|
// Copyright 2015 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License,
// version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which
// licence you accepted on initial access to the Software (the "Licences").
//
// By contributing code to the SAFE Network Software, or to this project generally, you agree to be
// bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the
// Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR.
//
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.
//
// Please review the Licences for the specific language governing permissions and limitations
// relating to use of the SAFE Network Software.
use name_type;
/// This trait is required for any type of message to be
/// passed to routing, refresh / account transfer is optional
/// The name will let routing know it's a NaeManager and the owner will allow routing to hash
/// the requester's ID with this name (by hashing the requester's ID) for put and post messages
pub trait Sendable {
fn name(&self)->name_type::NameType;
fn type_tag(&self)->u64;
fn serialised_contents(&self)->Vec<u8>;
fn
|
(&self)->Option<name_type::NameType> { Option::None }
fn refresh(&self)->bool; // is this an account transfer type
fn merge(&self, responses: Vec<Box<Sendable>>) -> Option<Box<Sendable>>;
}
|
owner
|
identifier_name
|
sendable.rs
|
// Copyright 2015 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License,
// version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which
// licence you accepted on initial access to the Software (the "Licences").
//
// By contributing code to the SAFE Network Software, or to this project generally, you agree to be
// bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the
// Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR.
//
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.
//
// Please review the Licences for the specific language governing permissions and limitations
// relating to use of the SAFE Network Software.
|
/// passed to routing, refresh / account transfer is optional
/// The name will let routing know it's a NaeManager and the owner will allow routing to hash
/// the requester's ID with this name (by hashing the requester's ID) for put and post messages
pub trait Sendable {
fn name(&self)->name_type::NameType;
fn type_tag(&self)->u64;
fn serialised_contents(&self)->Vec<u8>;
fn owner(&self)->Option<name_type::NameType> { Option::None }
fn refresh(&self)->bool; // is this an account transfer type
fn merge(&self, responses: Vec<Box<Sendable>>) -> Option<Box<Sendable>>;
}
|
use name_type;
/// This trait is required for any type of message to be
|
random_line_split
|
missing-allocator.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -C panic=abort
// no-prefer-dynamic
#![no_std]
#![crate_type = "staticlib"]
#![feature(panic_handler, alloc_error_handler, alloc)]
#[panic_handler]
fn panic(_: &core::panic::PanicInfo) -> ! {
loop {}
}
#[alloc_error_handler]
fn oom(_: core::alloc::Layout) -> ! {
loop {}
}
extern crate alloc;
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
random_line_split
|
missing-allocator.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -C panic=abort
// no-prefer-dynamic
#![no_std]
#![crate_type = "staticlib"]
#![feature(panic_handler, alloc_error_handler, alloc)]
#[panic_handler]
fn
|
(_: &core::panic::PanicInfo) -> ! {
loop {}
}
#[alloc_error_handler]
fn oom(_: core::alloc::Layout) -> ! {
loop {}
}
extern crate alloc;
|
panic
|
identifier_name
|
missing-allocator.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -C panic=abort
// no-prefer-dynamic
#![no_std]
#![crate_type = "staticlib"]
#![feature(panic_handler, alloc_error_handler, alloc)]
#[panic_handler]
fn panic(_: &core::panic::PanicInfo) -> ! {
loop {}
}
#[alloc_error_handler]
fn oom(_: core::alloc::Layout) -> !
|
extern crate alloc;
|
{
loop {}
}
|
identifier_body
|
lib.rs
|
extern crate racer_interner;
#[macro_use]
extern crate serde;
extern crate serde_json;
pub mod mapping;
pub mod metadata;
use crate::metadata::Metadata;
use std::env;
use std::error::Error;
use std::fmt;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::Utf8Error;
#[derive(Debug)]
pub enum ErrorKind {
Encode(Utf8Error),
Json(serde_json::Error),
Io(io::Error),
Subprocess(String),
}
impl fmt::Display for ErrorKind {
fn
|
(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ErrorKind::Encode(e) => fmt::Display::fmt(e, f),
ErrorKind::Json(e) => fmt::Display::fmt(e, f),
ErrorKind::Io(e) => fmt::Display::fmt(e, f),
ErrorKind::Subprocess(s) => write!(f, "stderr: {}", s),
}
}
}
impl Error for ErrorKind {}
impl From<Utf8Error> for ErrorKind {
fn from(e: Utf8Error) -> ErrorKind {
ErrorKind::Encode(e)
}
}
impl From<serde_json::Error> for ErrorKind {
fn from(e: serde_json::Error) -> ErrorKind {
ErrorKind::Json(e)
}
}
impl From<io::Error> for ErrorKind {
fn from(e: io::Error) -> ErrorKind {
ErrorKind::Io(e)
}
}
pub fn find_manifest(mut current: &Path) -> Option<PathBuf> {
let file = "Cargo.toml";
if current.is_dir() {
let manifest = current.join(file);
if manifest.exists() {
return Some(manifest);
}
}
while let Some(parent) = current.parent() {
let manifest = parent.join(file);
if manifest.exists() {
return Some(manifest);
}
current = parent;
}
None
}
pub fn run(manifest_path: &Path, frozen: bool) -> Result<Metadata, ErrorKind> {
let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
let mut cmd = Command::new(cargo);
cmd.arg("metadata");
cmd.arg("--all-features");
cmd.args(&["--format-version", "1"]);
cmd.args(&["--color", "never"]);
cmd.arg("--manifest-path");
cmd.arg(manifest_path.as_os_str());
if frozen {
cmd.arg("--frozen");
}
let op = cmd.output()?;
if !op.status.success() {
let stderr = String::from_utf8(op.stderr).map_err(|e| e.utf8_error())?;
return Err(ErrorKind::Subprocess(stderr));
}
serde_json::from_slice(&op.stdout).map_err(From::from)
}
|
fmt
|
identifier_name
|
lib.rs
|
extern crate racer_interner;
#[macro_use]
extern crate serde;
extern crate serde_json;
pub mod mapping;
pub mod metadata;
use crate::metadata::Metadata;
use std::env;
use std::error::Error;
use std::fmt;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::Utf8Error;
#[derive(Debug)]
pub enum ErrorKind {
Encode(Utf8Error),
Json(serde_json::Error),
Io(io::Error),
Subprocess(String),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ErrorKind::Encode(e) => fmt::Display::fmt(e, f),
ErrorKind::Json(e) => fmt::Display::fmt(e, f),
ErrorKind::Io(e) => fmt::Display::fmt(e, f),
ErrorKind::Subprocess(s) => write!(f, "stderr: {}", s),
}
}
}
impl Error for ErrorKind {}
impl From<Utf8Error> for ErrorKind {
fn from(e: Utf8Error) -> ErrorKind
|
}
impl From<serde_json::Error> for ErrorKind {
fn from(e: serde_json::Error) -> ErrorKind {
ErrorKind::Json(e)
}
}
impl From<io::Error> for ErrorKind {
fn from(e: io::Error) -> ErrorKind {
ErrorKind::Io(e)
}
}
pub fn find_manifest(mut current: &Path) -> Option<PathBuf> {
let file = "Cargo.toml";
if current.is_dir() {
let manifest = current.join(file);
if manifest.exists() {
return Some(manifest);
}
}
while let Some(parent) = current.parent() {
let manifest = parent.join(file);
if manifest.exists() {
return Some(manifest);
}
current = parent;
}
None
}
pub fn run(manifest_path: &Path, frozen: bool) -> Result<Metadata, ErrorKind> {
let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
let mut cmd = Command::new(cargo);
cmd.arg("metadata");
cmd.arg("--all-features");
cmd.args(&["--format-version", "1"]);
cmd.args(&["--color", "never"]);
cmd.arg("--manifest-path");
cmd.arg(manifest_path.as_os_str());
if frozen {
cmd.arg("--frozen");
}
let op = cmd.output()?;
if !op.status.success() {
let stderr = String::from_utf8(op.stderr).map_err(|e| e.utf8_error())?;
return Err(ErrorKind::Subprocess(stderr));
}
serde_json::from_slice(&op.stdout).map_err(From::from)
}
|
{
ErrorKind::Encode(e)
}
|
identifier_body
|
lib.rs
|
extern crate racer_interner;
#[macro_use]
extern crate serde;
extern crate serde_json;
pub mod mapping;
pub mod metadata;
use crate::metadata::Metadata;
use std::env;
use std::error::Error;
use std::fmt;
use std::io;
use std::path::{Path, PathBuf};
|
use std::str::Utf8Error;
#[derive(Debug)]
pub enum ErrorKind {
Encode(Utf8Error),
Json(serde_json::Error),
Io(io::Error),
Subprocess(String),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ErrorKind::Encode(e) => fmt::Display::fmt(e, f),
ErrorKind::Json(e) => fmt::Display::fmt(e, f),
ErrorKind::Io(e) => fmt::Display::fmt(e, f),
ErrorKind::Subprocess(s) => write!(f, "stderr: {}", s),
}
}
}
impl Error for ErrorKind {}
impl From<Utf8Error> for ErrorKind {
fn from(e: Utf8Error) -> ErrorKind {
ErrorKind::Encode(e)
}
}
impl From<serde_json::Error> for ErrorKind {
fn from(e: serde_json::Error) -> ErrorKind {
ErrorKind::Json(e)
}
}
impl From<io::Error> for ErrorKind {
fn from(e: io::Error) -> ErrorKind {
ErrorKind::Io(e)
}
}
pub fn find_manifest(mut current: &Path) -> Option<PathBuf> {
let file = "Cargo.toml";
if current.is_dir() {
let manifest = current.join(file);
if manifest.exists() {
return Some(manifest);
}
}
while let Some(parent) = current.parent() {
let manifest = parent.join(file);
if manifest.exists() {
return Some(manifest);
}
current = parent;
}
None
}
pub fn run(manifest_path: &Path, frozen: bool) -> Result<Metadata, ErrorKind> {
let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
let mut cmd = Command::new(cargo);
cmd.arg("metadata");
cmd.arg("--all-features");
cmd.args(&["--format-version", "1"]);
cmd.args(&["--color", "never"]);
cmd.arg("--manifest-path");
cmd.arg(manifest_path.as_os_str());
if frozen {
cmd.arg("--frozen");
}
let op = cmd.output()?;
if !op.status.success() {
let stderr = String::from_utf8(op.stderr).map_err(|e| e.utf8_error())?;
return Err(ErrorKind::Subprocess(stderr));
}
serde_json::from_slice(&op.stdout).map_err(From::from)
}
|
use std::process::Command;
|
random_line_split
|
transaction_info_test.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::*;
use crate::DiemDB;
use diem_temppath::TempPath;
use proptest::{collection::vec, prelude::*};
fn verify(
store: &LedgerStore,
txn_infos: &[TransactionInfo],
first_version: Version,
ledger_version: Version,
root_hash: HashValue,
) {
txn_infos
.iter()
.enumerate()
.for_each(|(idx, expected_txn_info)| {
let version = first_version + idx as u64;
let txn_info_with_proof = store
.get_transaction_info_with_proof(version, ledger_version)
.unwrap();
assert_eq!(txn_info_with_proof.transaction_info(), expected_txn_info);
txn_info_with_proof
.ledger_info_to_transaction_info_proof()
.verify(
root_hash,
txn_info_with_proof.transaction_info().hash(),
version,
)
.unwrap();
})
}
fn save(store: &LedgerStore, first_version: Version, txn_infos: &[TransactionInfo]) -> HashValue {
let mut cs = ChangeSet::new();
let root_hash = store
.put_transaction_infos(first_version, &txn_infos, &mut cs)
.unwrap();
store.db.write_schemas(cs.batch).unwrap();
root_hash
}
proptest! {
|
#[test]
fn test_transaction_info_put_get_verify(
batch1 in vec(any::<TransactionInfo>(), 1..100),
batch2 in vec(any::<TransactionInfo>(), 1..100),
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
// insert two batches of transaction infos
let root_hash1 = save(store, 0, &batch1);
let ledger_version1 = batch1.len() as u64 - 1;
let root_hash2 = save(store, batch1.len() as u64, &batch2);
let ledger_version2 = batch1.len() as u64 + batch2.len() as u64 - 1;
// retrieve all leaves and verify against latest root hash
verify(store, &batch1, 0, ledger_version2, root_hash2);
verify(store, &batch2, batch1.len() as u64, ledger_version2, root_hash2);
// retrieve batch1 and verify against root_hash after batch1 was inserted
verify(store, &batch1, 0, ledger_version1, root_hash1);
}
#[test]
fn test_transaction_info_get_iterator(
(infos, start_version, num_transaction_infos) in
vec(any::<TransactionInfo>(), 1..100)
.prop_flat_map(|infos| {
let num_infos = infos.len() as u64;
(Just(infos), 0..num_infos)
})
.prop_flat_map(|(infos, start_version)| {
let num_infos = infos.len() as u64;
(Just(infos), Just(start_version), 0..num_infos as usize * 2)
})
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
save(store, 0, &infos);
let iter = store
.get_transaction_info_iter(start_version, num_transaction_infos)
.unwrap();
prop_assert_eq!(
infos
.into_iter()
.skip(start_version as usize)
.take(num_transaction_infos as usize)
.collect::<Vec<_>>(),
iter.collect::<Result<Vec<_>>>().unwrap()
);
}
}
|
#![proptest_config(ProptestConfig::with_cases(10))]
|
random_line_split
|
transaction_info_test.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::*;
use crate::DiemDB;
use diem_temppath::TempPath;
use proptest::{collection::vec, prelude::*};
fn verify(
store: &LedgerStore,
txn_infos: &[TransactionInfo],
first_version: Version,
ledger_version: Version,
root_hash: HashValue,
)
|
})
}
fn save(store: &LedgerStore, first_version: Version, txn_infos: &[TransactionInfo]) -> HashValue {
let mut cs = ChangeSet::new();
let root_hash = store
.put_transaction_infos(first_version, &txn_infos, &mut cs)
.unwrap();
store.db.write_schemas(cs.batch).unwrap();
root_hash
}
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_transaction_info_put_get_verify(
batch1 in vec(any::<TransactionInfo>(), 1..100),
batch2 in vec(any::<TransactionInfo>(), 1..100),
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
// insert two batches of transaction infos
let root_hash1 = save(store, 0, &batch1);
let ledger_version1 = batch1.len() as u64 - 1;
let root_hash2 = save(store, batch1.len() as u64, &batch2);
let ledger_version2 = batch1.len() as u64 + batch2.len() as u64 - 1;
// retrieve all leaves and verify against latest root hash
verify(store, &batch1, 0, ledger_version2, root_hash2);
verify(store, &batch2, batch1.len() as u64, ledger_version2, root_hash2);
// retrieve batch1 and verify against root_hash after batch1 was inserted
verify(store, &batch1, 0, ledger_version1, root_hash1);
}
#[test]
fn test_transaction_info_get_iterator(
(infos, start_version, num_transaction_infos) in
vec(any::<TransactionInfo>(), 1..100)
.prop_flat_map(|infos| {
let num_infos = infos.len() as u64;
(Just(infos), 0..num_infos)
})
.prop_flat_map(|(infos, start_version)| {
let num_infos = infos.len() as u64;
(Just(infos), Just(start_version), 0..num_infos as usize * 2)
})
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
save(store, 0, &infos);
let iter = store
.get_transaction_info_iter(start_version, num_transaction_infos)
.unwrap();
prop_assert_eq!(
infos
.into_iter()
.skip(start_version as usize)
.take(num_transaction_infos as usize)
.collect::<Vec<_>>(),
iter.collect::<Result<Vec<_>>>().unwrap()
);
}
}
|
{
txn_infos
.iter()
.enumerate()
.for_each(|(idx, expected_txn_info)| {
let version = first_version + idx as u64;
let txn_info_with_proof = store
.get_transaction_info_with_proof(version, ledger_version)
.unwrap();
assert_eq!(txn_info_with_proof.transaction_info(), expected_txn_info);
txn_info_with_proof
.ledger_info_to_transaction_info_proof()
.verify(
root_hash,
txn_info_with_proof.transaction_info().hash(),
version,
)
.unwrap();
|
identifier_body
|
transaction_info_test.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::*;
use crate::DiemDB;
use diem_temppath::TempPath;
use proptest::{collection::vec, prelude::*};
fn
|
(
store: &LedgerStore,
txn_infos: &[TransactionInfo],
first_version: Version,
ledger_version: Version,
root_hash: HashValue,
) {
txn_infos
.iter()
.enumerate()
.for_each(|(idx, expected_txn_info)| {
let version = first_version + idx as u64;
let txn_info_with_proof = store
.get_transaction_info_with_proof(version, ledger_version)
.unwrap();
assert_eq!(txn_info_with_proof.transaction_info(), expected_txn_info);
txn_info_with_proof
.ledger_info_to_transaction_info_proof()
.verify(
root_hash,
txn_info_with_proof.transaction_info().hash(),
version,
)
.unwrap();
})
}
fn save(store: &LedgerStore, first_version: Version, txn_infos: &[TransactionInfo]) -> HashValue {
let mut cs = ChangeSet::new();
let root_hash = store
.put_transaction_infos(first_version, &txn_infos, &mut cs)
.unwrap();
store.db.write_schemas(cs.batch).unwrap();
root_hash
}
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_transaction_info_put_get_verify(
batch1 in vec(any::<TransactionInfo>(), 1..100),
batch2 in vec(any::<TransactionInfo>(), 1..100),
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
// insert two batches of transaction infos
let root_hash1 = save(store, 0, &batch1);
let ledger_version1 = batch1.len() as u64 - 1;
let root_hash2 = save(store, batch1.len() as u64, &batch2);
let ledger_version2 = batch1.len() as u64 + batch2.len() as u64 - 1;
// retrieve all leaves and verify against latest root hash
verify(store, &batch1, 0, ledger_version2, root_hash2);
verify(store, &batch2, batch1.len() as u64, ledger_version2, root_hash2);
// retrieve batch1 and verify against root_hash after batch1 was inserted
verify(store, &batch1, 0, ledger_version1, root_hash1);
}
#[test]
fn test_transaction_info_get_iterator(
(infos, start_version, num_transaction_infos) in
vec(any::<TransactionInfo>(), 1..100)
.prop_flat_map(|infos| {
let num_infos = infos.len() as u64;
(Just(infos), 0..num_infos)
})
.prop_flat_map(|(infos, start_version)| {
let num_infos = infos.len() as u64;
(Just(infos), Just(start_version), 0..num_infos as usize * 2)
})
) {
let tmp_dir = TempPath::new();
let db = DiemDB::new_for_test(&tmp_dir);
let store = &db.ledger_store;
save(store, 0, &infos);
let iter = store
.get_transaction_info_iter(start_version, num_transaction_infos)
.unwrap();
prop_assert_eq!(
infos
.into_iter()
.skip(start_version as usize)
.take(num_transaction_infos as usize)
.collect::<Vec<_>>(),
iter.collect::<Result<Vec<_>>>().unwrap()
);
}
}
|
verify
|
identifier_name
|
server.rs
|
// TODO: Implement a reader and writer from the client / server relationship.
// Then try and get clients to talk to everyone -> specific clients.
// Once that's done, move on to formatting messages via tokio::Encode / Decode.
// Example TCP
#[allow(unused_imports)]
use std::net::SocketAddr;
use std::collections::{HashMap};
use std::rc::Rc;
use std::cell::RefCell;
use std::io;
use futures::AsyncSink;
use futures::sink::{Sink};
use futures::sync::mpsc;
use futures::future::{Future};
use futures::stream::{Stream};
use tokio_core::io::Io;
use tokio_core::net::{TcpListener};
use tokio_core::reactor::{Core};
use client::TctClient;
use stanza::{Stanza, StanzaCodec, UserID};
pub struct
|
{
clients: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
#[allow(dead_code)]
groups: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
channel: (mpsc::UnboundedSender<Stanza>, mpsc::UnboundedReceiver<Stanza>),
core: Core,
addr: SocketAddr,
}
// Ixnay on the private client, defeats the purpose and actually breaks the design
// in the first place lulz.
impl TctServer {
/// Creates a new TctServer to be run.
pub fn new(addr: SocketAddr) -> TctServer {
let core = Core::new().unwrap();
TctServer {
// Odd way of doing this but yeah, core needs to be defined after
// channel.
channel: mpsc::unbounded(),
core: core,
addr: addr,
groups: Rc::new(RefCell::new(HashMap::new())),
clients: Rc::new(RefCell::new(HashMap::new()))
}
}
/// Execute the server, runs in foreground for any application...
/// This is a temporary design to get something off the ground.
pub fn run(&mut self) {
let socket =
TcpListener::bind(&self.addr, &self.core.handle().clone()).unwrap();
let handle = self.core.handle();
let clients = self.clients.clone();
let sender = self.channel.0.clone();
println!("Chat service running at port : {}", self.addr.port());
// For each incoming client connection at address 'addr'
let server = socket.incoming().for_each(|(stream, addr)| {
println!("Connected to client : {}", addr);
let mut server_sender = sender.clone();
// (writer, reader) == (sink, stream)
// reader -> sender
// receiver -> writer
let (mut writer, reader) = TctClient::new(stream, addr)
.framed(StanzaCodec).split();
let (sender, receiver) = mpsc::unbounded();
let clients_inner = clients.clone();
// Every message received over the stream, from client
let reader = reader.into_future().map_err(|(err, _)| err).and_then(
|(creds, stream)| {
if let Some(Stanza::LoginCredentials{ user, psw: _ }) = creds {
if user.len() == 0 {
println!("No username supplied. \
Closing stream...");
} else {
println!("User {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
// TODO: System of verification that a user is in the db.
}
} else if let Some(Stanza::Register{ user, psw: _ }) = creds {
println!("New user {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
} else {
println!("No login credentials supplied from client. \
Closing stream...");
// By not giving resources for sender into HashMap,
// data does not persist for this client, connection
// will then time-out.
}
stream.for_each(move |msg: Stanza| {
println!("Read made for"); // TODO: Give addr here
if let Some(to) = msg.to() {
clients_inner.borrow_mut().get_mut(&to)
.unwrap_or(&mut server_sender) // TODO:
.send(msg)
.or_else(
|err| Err(io::Error::new(
io::ErrorKind::Other, err)))
} else { panic!("Client reported error") }
})
}).map_err(|_| ());
let receiver = receiver.for_each(move |msg| {
println!("Writing message to {}", addr);
let response = match writer.start_send(msg) { // handle it like 'send'
Ok(AsyncSink::Ready) => Ok(()),
Ok(AsyncSink::NotReady(_)) => panic!("failed to send"),
Err(_) => Err(())
};
writer.poll_complete().unwrap();
response
});
//let clients = self.clients.clone();
// TODO: 'select' combinator
handle.spawn(receiver);
handle.spawn(reader);
Ok(())
});
self.core.run(server).unwrap();
}
}
|
TctServer
|
identifier_name
|
server.rs
|
// TODO: Implement a reader and writer from the client / server relationship.
// Then try and get clients to talk to everyone -> specific clients.
// Once that's done, move on to formatting messages via tokio::Encode / Decode.
// Example TCP
#[allow(unused_imports)]
use std::net::SocketAddr;
use std::collections::{HashMap};
use std::rc::Rc;
use std::cell::RefCell;
use std::io;
use futures::AsyncSink;
use futures::sink::{Sink};
use futures::sync::mpsc;
use futures::future::{Future};
use futures::stream::{Stream};
use tokio_core::io::Io;
use tokio_core::net::{TcpListener};
use tokio_core::reactor::{Core};
use client::TctClient;
use stanza::{Stanza, StanzaCodec, UserID};
pub struct TctServer {
clients: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
#[allow(dead_code)]
groups: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
channel: (mpsc::UnboundedSender<Stanza>, mpsc::UnboundedReceiver<Stanza>),
core: Core,
addr: SocketAddr,
}
// Ixnay on the private client, defeats the purpose and actually breaks the design
// in the first place lulz.
impl TctServer {
/// Creates a new TctServer to be run.
pub fn new(addr: SocketAddr) -> TctServer
|
/// Execute the server, runs in foreground for any application...
/// This is a temporary design to get something off the ground.
pub fn run(&mut self) {
let socket =
TcpListener::bind(&self.addr, &self.core.handle().clone()).unwrap();
let handle = self.core.handle();
let clients = self.clients.clone();
let sender = self.channel.0.clone();
println!("Chat service running at port : {}", self.addr.port());
// For each incoming client connection at address 'addr'
let server = socket.incoming().for_each(|(stream, addr)| {
println!("Connected to client : {}", addr);
let mut server_sender = sender.clone();
// (writer, reader) == (sink, stream)
// reader -> sender
// receiver -> writer
let (mut writer, reader) = TctClient::new(stream, addr)
.framed(StanzaCodec).split();
let (sender, receiver) = mpsc::unbounded();
let clients_inner = clients.clone();
// Every message received over the stream, from client
let reader = reader.into_future().map_err(|(err, _)| err).and_then(
|(creds, stream)| {
if let Some(Stanza::LoginCredentials{ user, psw: _ }) = creds {
if user.len() == 0 {
println!("No username supplied. \
Closing stream...");
} else {
println!("User {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
// TODO: System of verification that a user is in the db.
}
} else if let Some(Stanza::Register{ user, psw: _ }) = creds {
println!("New user {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
} else {
println!("No login credentials supplied from client. \
Closing stream...");
// By not giving resources for sender into HashMap,
// data does not persist for this client, connection
// will then time-out.
}
stream.for_each(move |msg: Stanza| {
println!("Read made for"); // TODO: Give addr here
if let Some(to) = msg.to() {
clients_inner.borrow_mut().get_mut(&to)
.unwrap_or(&mut server_sender) // TODO:
.send(msg)
.or_else(
|err| Err(io::Error::new(
io::ErrorKind::Other, err)))
} else { panic!("Client reported error") }
})
}).map_err(|_| ());
let receiver = receiver.for_each(move |msg| {
println!("Writing message to {}", addr);
let response = match writer.start_send(msg) { // handle it like 'send'
Ok(AsyncSink::Ready) => Ok(()),
Ok(AsyncSink::NotReady(_)) => panic!("failed to send"),
Err(_) => Err(())
};
writer.poll_complete().unwrap();
response
});
//let clients = self.clients.clone();
// TODO: 'select' combinator
handle.spawn(receiver);
handle.spawn(reader);
Ok(())
});
self.core.run(server).unwrap();
}
}
|
{
let core = Core::new().unwrap();
TctServer {
// Odd way of doing this but yeah, core needs to be defined after
// channel.
channel: mpsc::unbounded(),
core: core,
addr: addr,
groups: Rc::new(RefCell::new(HashMap::new())),
clients: Rc::new(RefCell::new(HashMap::new()))
}
}
|
identifier_body
|
server.rs
|
// TODO: Implement a reader and writer from the client / server relationship.
// Then try and get clients to talk to everyone -> specific clients.
// Once that's done, move on to formatting messages via tokio::Encode / Decode.
// Example TCP
#[allow(unused_imports)]
use std::net::SocketAddr;
use std::collections::{HashMap};
use std::rc::Rc;
use std::cell::RefCell;
use std::io;
use futures::AsyncSink;
use futures::sink::{Sink};
use futures::sync::mpsc;
use futures::future::{Future};
use futures::stream::{Stream};
use tokio_core::io::Io;
use tokio_core::net::{TcpListener};
use tokio_core::reactor::{Core};
use client::TctClient;
use stanza::{Stanza, StanzaCodec, UserID};
pub struct TctServer {
clients: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
#[allow(dead_code)]
groups: Rc<RefCell<HashMap<UserID, mpsc::UnboundedSender<Stanza>>>>,
channel: (mpsc::UnboundedSender<Stanza>, mpsc::UnboundedReceiver<Stanza>),
core: Core,
addr: SocketAddr,
}
// Ixnay on the private client, defeats the purpose and actually breaks the design
// in the first place lulz.
impl TctServer {
/// Creates a new TctServer to be run.
pub fn new(addr: SocketAddr) -> TctServer {
let core = Core::new().unwrap();
TctServer {
// Odd way of doing this but yeah, core needs to be defined after
// channel.
channel: mpsc::unbounded(),
core: core,
addr: addr,
groups: Rc::new(RefCell::new(HashMap::new())),
clients: Rc::new(RefCell::new(HashMap::new()))
}
}
/// Execute the server, runs in foreground for any application...
/// This is a temporary design to get something off the ground.
pub fn run(&mut self) {
let socket =
TcpListener::bind(&self.addr, &self.core.handle().clone()).unwrap();
let handle = self.core.handle();
let clients = self.clients.clone();
let sender = self.channel.0.clone();
println!("Chat service running at port : {}", self.addr.port());
// For each incoming client connection at address 'addr'
let server = socket.incoming().for_each(|(stream, addr)| {
println!("Connected to client : {}", addr);
let mut server_sender = sender.clone();
// (writer, reader) == (sink, stream)
// reader -> sender
// receiver -> writer
let (mut writer, reader) = TctClient::new(stream, addr)
.framed(StanzaCodec).split();
let (sender, receiver) = mpsc::unbounded();
let clients_inner = clients.clone();
// Every message received over the stream, from client
let reader = reader.into_future().map_err(|(err, _)| err).and_then(
|(creds, stream)| {
if let Some(Stanza::LoginCredentials{ user, psw: _ }) = creds {
if user.len() == 0 {
println!("No username supplied. \
Closing stream...");
} else {
println!("User {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
// TODO: System of verification that a user is in the db.
}
} else if let Some(Stanza::Register{ user, psw: _ }) = creds {
println!("New user {} logged in!", user);
clients_inner.borrow_mut().insert(user, sender);
} else {
println!("No login credentials supplied from client. \
Closing stream...");
// By not giving resources for sender into HashMap,
// data does not persist for this client, connection
// will then time-out.
}
stream.for_each(move |msg: Stanza| {
println!("Read made for"); // TODO: Give addr here
if let Some(to) = msg.to() {
clients_inner.borrow_mut().get_mut(&to)
.unwrap_or(&mut server_sender) // TODO:
.send(msg)
.or_else(
|err| Err(io::Error::new(
io::ErrorKind::Other, err)))
} else { panic!("Client reported error") }
})
}).map_err(|_| ());
let receiver = receiver.for_each(move |msg| {
println!("Writing message to {}", addr);
let response = match writer.start_send(msg) { // handle it like 'send'
Ok(AsyncSink::Ready) => Ok(()),
Ok(AsyncSink::NotReady(_)) => panic!("failed to send"),
Err(_) => Err(())
};
writer.poll_complete().unwrap();
response
});
//let clients = self.clients.clone();
// TODO: 'select' combinator
handle.spawn(receiver);
handle.spawn(reader);
Ok(())
});
|
}
}
|
self.core.run(server).unwrap();
|
random_line_split
|