file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
bench.rs | #![feature(test)]
extern crate robots;
extern crate test;
use std::any::Any;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Sender};
use robots::actors::{Actor, ActorSystem, ActorCell, ActorContext, Props};
use test::Bencher;
/// Messages exchanged during the benchmark: `Nothing` is filler traffic,
/// `Over` asks the receiving actor to report completion on its channel.
#[derive(Copy, Clone, PartialEq)]
enum BenchMessage {
    Nothing,
    Over,
}
/// State of the benchmark actor: a shared handle on the completion channel.
struct InternalState {
    // Wrapped in Arc<Mutex<..>> so the same Sender can be shared by every
    // actor built from the Props factory (presumably because Sender alone
    // is not shareable across the actor system -- confirm against robots' API).
    sender: Arc<Mutex<Sender<()>>>,
}
impl Actor for InternalState {
    /// Downcasts each incoming message; on `BenchMessage::Over` it pings the
    /// completion channel so the benchmark loop can stop waiting.
    fn receive(&self, message: Box<Any>, _context: ActorCell) {
        if let Ok(message) = Box::<Any>::downcast::<BenchMessage>(message) {
            if *message == BenchMessage::Over {
                // Send failures are deliberately ignored: the bench iteration
                // may already be finished with the receiving end.
                let _ = self.sender.lock().unwrap().send(());
            }
        }
    }
}
impl InternalState {
    /// Builds the actor state from a shared completion-channel sender.
    /// The signature matches the factory shape expected by `Props::new`.
    fn new(sender: Arc<Mutex<Sender<()>>>) -> InternalState {
        InternalState { sender: sender }
    }
}
#[bench]
/// This bench sends a thousand messages to an actor then waits for an answer on a channel.
/// When the thousandth is handled the actor sends a message on the above channel.
fn send_1000_messages(b: &mut Bencher) {
    let actor_system = ActorSystem::new("test".to_owned());
    let (tx, rx) = channel();
    // Completion channel shared with every actor spawned from `props`.
    let tx = Arc::new(Mutex::new(tx));
    let props = Props::new(Arc::new(InternalState::new), tx);
    let actor_ref_1 = actor_system.actor_of(props.clone(), "sender".to_owned());
    let actor_ref_2 = actor_system.actor_of(props.clone(), "receiver".to_owned());
    b.iter(|| {
        // 999 filler messages plus the final `Over` = 1000 messages per iteration.
        for _ in 0..999 {
            actor_ref_1.tell_to(actor_ref_2.clone(), BenchMessage::Nothing);
        }
        actor_ref_1.tell_to(actor_ref_2.clone(), BenchMessage::Over);
        // Block until the receiver signals it handled `Over`; the result is
        // irrelevant, only the synchronization matters.
        let _ = rx.recv();
    });
    actor_system.shutdown();
}
/// An empty actor used to measure pure actor-creation overhead.
struct Dummy;
impl Actor for Dummy {
    /// No-op: every message is silently dropped.
    fn receive(&self, _message: Box<Any>, _context: ActorCell) {}
}
impl Dummy {
    /// Constructor in the factory shape used with `Props::new`; the unit
    /// argument is required by that interface and ignored.
    fn new(_: ()) -> Dummy {
        Dummy
    }
}
#[bench]
/// This bench creates a thousand empty actors. |
let props = Props::new(Arc::new(Dummy::new), ());
b.iter(|| {
for i in 0..1_000 {
actor_system.actor_of(props.clone(), format!("{}", i));
}
});
actor_system.shutdown();
} | /// Since actor creation is synchronous this is ok to just call the function mutiple times.
/// The created actor is empty in order to just bench the overhead of creation.
fn create_1000_actors(b: &mut Bencher) {
let actor_system = ActorSystem::new("test".to_owned()); | random_line_split |
term_query.rs | use super::term_weight::TermWeight;
use crate::query::bm25::Bm25Weight;
use crate::query::Weight;
use crate::query::{Explanation, Query};
use crate::schema::IndexRecordOption;
use crate::Searcher;
use crate::Term;
use std::collections::BTreeMap;
use std::fmt;
/// A Term query matches all of the documents
/// containing a specific term.
///
/// The score associated is defined as
/// `idf` * sqrt(`term_freq` / `field norm`)
/// in which :
/// * `idf` - inverse document frequency.
/// * `term_freq` - number of occurrences of the term in the field
/// * `field norm` - number of tokens in the field.
///
/// ```rust
/// use tantivy::collector::{Count, TopDocs};
/// use tantivy::query::TermQuery;
/// use tantivy::schema::{Schema, TEXT, IndexRecordOption};
/// use tantivy::{doc, Index, Term};
/// # fn test() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of the Wind",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "A Dairy Cow",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of a Young Girl",
/// ))?;
/// index_writer.commit()?;
/// }
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
/// let query = TermQuery::new(
/// Term::from_field_text(title, "diary"), | /// );
/// let (top_docs, count) = searcher.search(&query, &(TopDocs::with_limit(2), Count))?;
/// assert_eq!(count, 2);
/// Ok(())
/// # }
/// # assert!(test().is_ok());
/// ```
#[derive(Clone)]
pub struct TermQuery {
    // The term this query matches against.
    term: Term,
    // Postings detail level requested at construction time; downgraded to
    // `Basic` by `specialized_weight` when scoring is disabled.
    index_record_option: IndexRecordOption,
}
impl fmt::Debug for TermQuery {
    // Compact debug form: only the term is shown, the record option is omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TermQuery({:?})", self.term)
    }
}
impl TermQuery {
    /// Creates a new term query.
    ///
    /// `segment_postings_options` selects how much postings detail the query
    /// will request from the index.
    pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
        TermQuery {
            term,
            index_record_option: segment_postings_options,
        }
    }
    /// The `Term` this query is built out of.
    pub fn term(&self) -> &Term {
        &self.term
    }
    /// Returns a weight object.
    ///
    /// While `.weight(...)` returns a boxed trait object,
    /// this method return a specific implementation.
    /// This is useful for optimization purpose.
    ///
    /// Fails with `TantivyError::SchemaError` when the queried field is not
    /// indexed.
    pub fn specialized_weight(
        &self,
        searcher: &Searcher,
        scoring_enabled: bool,
    ) -> crate::Result<TermWeight> {
        let field_entry = searcher.schema().get_field_entry(self.term.field());
        if!field_entry.is_indexed() {
            let error_msg = format!("Field {:?} is not indexed.", field_entry.name());
            return Err(crate::TantivyError::SchemaError(error_msg));
        }
        // With scoring enabled, compute real BM25 statistics for this single
        // term; otherwise substitute a fixed 1.0 weight labelled "<no score>".
        let bm25_weight = if scoring_enabled {
            Bm25Weight::for_terms(searcher, &[self.term.clone()])?
        } else {
            Bm25Weight::new(Explanation::new("<no score>".to_string(), 1.0f32), 1.0f32)
        };
        // When scores are not needed, the requested record option is replaced
        // by the cheapest one.
        let index_record_option = if scoring_enabled {
            self.index_record_option
        } else {
            IndexRecordOption::Basic
        };
        Ok(TermWeight::new(
            self.term.clone(),
            index_record_option,
            bm25_weight,
            scoring_enabled,
        ))
    }
}
impl Query for TermQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
Ok(Box::new(
self.specialized_weight(searcher, scoring_enabled)?,
))
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
terms.insert(self.term.clone(), false);
}
} | /// IndexRecordOption::Basic, | random_line_split |
term_query.rs | use super::term_weight::TermWeight;
use crate::query::bm25::Bm25Weight;
use crate::query::Weight;
use crate::query::{Explanation, Query};
use crate::schema::IndexRecordOption;
use crate::Searcher;
use crate::Term;
use std::collections::BTreeMap;
use std::fmt;
/// A Term query matches all of the documents
/// containing a specific term.
///
/// The score associated is defined as
/// `idf` * sqrt(`term_freq` / `field norm`)
/// in which :
/// * `idf` - inverse document frequency.
/// * `term_freq` - number of occurrences of the term in the field
/// * `field norm` - number of tokens in the field.
///
/// ```rust
/// use tantivy::collector::{Count, TopDocs};
/// use tantivy::query::TermQuery;
/// use tantivy::schema::{Schema, TEXT, IndexRecordOption};
/// use tantivy::{doc, Index, Term};
/// # fn test() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of the Wind",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "A Dairy Cow",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of a Young Girl",
/// ))?;
/// index_writer.commit()?;
/// }
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
/// let query = TermQuery::new(
/// Term::from_field_text(title, "diary"),
/// IndexRecordOption::Basic,
/// );
/// let (top_docs, count) = searcher.search(&query, &(TopDocs::with_limit(2), Count))?;
/// assert_eq!(count, 2);
/// Ok(())
/// # }
/// # assert!(test().is_ok());
/// ```
#[derive(Clone)]
pub struct TermQuery {
    // Term being matched.
    term: Term,
    // Requested postings detail level (may be downgraded when scoring is off).
    index_record_option: IndexRecordOption,
}
impl fmt::Debug for TermQuery {
    // Shows only the term; the record option is intentionally omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TermQuery({:?})", self.term)
    }
}
impl TermQuery {
    /// Creates a new term query.
    ///
    /// `segment_postings_options` selects the postings detail level the
    /// query will request.
    pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
        TermQuery {
            term,
            index_record_option: segment_postings_options,
        }
    }
    /// The `Term` this query is built out of.
    pub fn term(&self) -> &Term {
        &self.term
    }
/// Returns a weight object.
///
/// While `.weight(...)` returns a boxed trait object,
/// this method return a specific implementation.
/// This is useful for optimization purpose.
pub fn specialized_weight(
&self,
searcher: &Searcher,
scoring_enabled: bool,
) -> crate::Result<TermWeight> {
let field_entry = searcher.schema().get_field_entry(self.term.field());
if!field_entry.is_indexed() {
let error_msg = format!("Field {:?} is not indexed.", field_entry.name());
return Err(crate::TantivyError::SchemaError(error_msg));
}
let bm25_weight = if scoring_enabled {
Bm25Weight::for_terms(searcher, &[self.term.clone()])?
} else {
Bm25Weight::new(Explanation::new("<no score>".to_string(), 1.0f32), 1.0f32)
};
let index_record_option = if scoring_enabled {
self.index_record_option
} else | ;
Ok(TermWeight::new(
self.term.clone(),
index_record_option,
bm25_weight,
scoring_enabled,
))
}
}
impl Query for TermQuery {
    /// Generic entry point: wraps the specialized weight in a boxed `Weight`.
    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
        Ok(Box::new(
            self.specialized_weight(searcher, scoring_enabled)?,
        ))
    }
    /// Reports the single term this query depends on, flagged `false`.
    fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
        terms.insert(self.term.clone(), false);
    }
}
| {
IndexRecordOption::Basic
} | conditional_block |
term_query.rs | use super::term_weight::TermWeight;
use crate::query::bm25::Bm25Weight;
use crate::query::Weight;
use crate::query::{Explanation, Query};
use crate::schema::IndexRecordOption;
use crate::Searcher;
use crate::Term;
use std::collections::BTreeMap;
use std::fmt;
/// A Term query matches all of the documents
/// containing a specific term.
///
/// The score associated is defined as
/// `idf` * sqrt(`term_freq` / `field norm`)
/// in which :
/// * `idf` - inverse document frequency.
/// * `term_freq` - number of occurrences of the term in the field
/// * `field norm` - number of tokens in the field.
///
/// ```rust
/// use tantivy::collector::{Count, TopDocs};
/// use tantivy::query::TermQuery;
/// use tantivy::schema::{Schema, TEXT, IndexRecordOption};
/// use tantivy::{doc, Index, Term};
/// # fn test() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of the Wind",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "A Dairy Cow",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of a Young Girl",
/// ))?;
/// index_writer.commit()?;
/// }
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
/// let query = TermQuery::new(
/// Term::from_field_text(title, "diary"),
/// IndexRecordOption::Basic,
/// );
/// let (top_docs, count) = searcher.search(&query, &(TopDocs::with_limit(2), Count))?;
/// assert_eq!(count, 2);
/// Ok(())
/// # }
/// # assert!(test().is_ok());
/// ```
#[derive(Clone)]
pub struct TermQuery {
    // The term to match.
    term: Term,
    // Postings detail level requested at construction time.
    index_record_option: IndexRecordOption,
}
impl fmt::Debug for TermQuery {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TermQuery({:?})", self.term)
}
}
impl TermQuery {
    /// Creates a new term query.
    pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
        let index_record_option = segment_postings_options;
        TermQuery {
            term,
            index_record_option,
        }
    }

    /// The `Term` this query is built out of.
    pub fn term(&self) -> &Term {
        &self.term
    }

    /// Returns a weight object.
    ///
    /// While `.weight(...)` returns a boxed trait object,
    /// this method return a specific implementation.
    /// This is useful for optimization purpose.
    pub fn specialized_weight(
        &self,
        searcher: &Searcher,
        scoring_enabled: bool,
    ) -> crate::Result<TermWeight> {
        // Reject fields that were never indexed: there are no postings to read.
        let schema = searcher.schema();
        let field_entry = schema.get_field_entry(self.term.field());
        if !field_entry.is_indexed() {
            return Err(crate::TantivyError::SchemaError(format!(
                "Field {:?} is not indexed.",
                field_entry.name()
            )));
        }
        // Real BM25 statistics when scoring, a fixed 1.0 placeholder otherwise.
        let bm25_weight;
        if scoring_enabled {
            bm25_weight = Bm25Weight::for_terms(searcher, &[self.term.clone()])?;
        } else {
            bm25_weight = Bm25Weight::new(Explanation::new("<no score>".to_string(), 1.0f32), 1.0f32);
        }
        // Without scoring, the cheapest record option suffices.
        let index_record_option = match scoring_enabled {
            true => self.index_record_option,
            false => IndexRecordOption::Basic,
        };
        Ok(TermWeight::new(
            self.term.clone(),
            index_record_option,
            bm25_weight,
            scoring_enabled,
        ))
    }
}
impl Query for TermQuery {
    /// Generic entry point: boxes the specialized `TermWeight`.
    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
        Ok(Box::new(
            self.specialized_weight(searcher, scoring_enabled)?,
        ))
    }
    /// Reports the single term this query depends on, flagged `false`.
    fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
        terms.insert(self.term.clone(), false);
    }
}
| fmt | identifier_name |
term_query.rs | use super::term_weight::TermWeight;
use crate::query::bm25::Bm25Weight;
use crate::query::Weight;
use crate::query::{Explanation, Query};
use crate::schema::IndexRecordOption;
use crate::Searcher;
use crate::Term;
use std::collections::BTreeMap;
use std::fmt;
/// A Term query matches all of the documents
/// containing a specific term.
///
/// The score associated is defined as
/// `idf` * sqrt(`term_freq` / `field norm`)
/// in which :
/// * `idf` - inverse document frequency.
/// * `term_freq` - number of occurrences of the term in the field
/// * `field norm` - number of tokens in the field.
///
/// ```rust
/// use tantivy::collector::{Count, TopDocs};
/// use tantivy::query::TermQuery;
/// use tantivy::schema::{Schema, TEXT, IndexRecordOption};
/// use tantivy::{doc, Index, Term};
/// # fn test() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of the Wind",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "A Dairy Cow",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of a Young Girl",
/// ))?;
/// index_writer.commit()?;
/// }
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
/// let query = TermQuery::new(
/// Term::from_field_text(title, "diary"),
/// IndexRecordOption::Basic,
/// );
/// let (top_docs, count) = searcher.search(&query, &(TopDocs::with_limit(2), Count))?;
/// assert_eq!(count, 2);
/// Ok(())
/// # }
/// # assert!(test().is_ok());
/// ```
#[derive(Clone)]
pub struct TermQuery {
    // Term being matched.
    term: Term,
    // Requested postings detail level.
    index_record_option: IndexRecordOption,
}
impl fmt::Debug for TermQuery {
    // Compact debug form showing only the term.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TermQuery({:?})", self.term)
    }
}
impl TermQuery {
    /// Creates a new term query.
    ///
    /// `segment_postings_options` selects the postings detail level the
    /// query will request.
    pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
        TermQuery {
            term,
            index_record_option: segment_postings_options,
        }
    }
/// The `Term` this query is built out of.
pub fn term(&self) -> &Term |
    /// Returns a weight object.
    ///
    /// While `.weight(...)` returns a boxed trait object,
    /// this method return a specific implementation.
    /// This is useful for optimization purpose.
    ///
    /// Fails with `TantivyError::SchemaError` when the field is not indexed.
    pub fn specialized_weight(
        &self,
        searcher: &Searcher,
        scoring_enabled: bool,
    ) -> crate::Result<TermWeight> {
        let field_entry = searcher.schema().get_field_entry(self.term.field());
        if!field_entry.is_indexed() {
            let error_msg = format!("Field {:?} is not indexed.", field_entry.name());
            return Err(crate::TantivyError::SchemaError(error_msg));
        }
        // Real BM25 statistics when scoring; a fixed 1.0 placeholder otherwise.
        let bm25_weight = if scoring_enabled {
            Bm25Weight::for_terms(searcher, &[self.term.clone()])?
        } else {
            Bm25Weight::new(Explanation::new("<no score>".to_string(), 1.0f32), 1.0f32)
        };
        // Without scoring, downgrade to the cheapest record option.
        let index_record_option = if scoring_enabled {
            self.index_record_option
        } else {
            IndexRecordOption::Basic
        };
        Ok(TermWeight::new(
            self.term.clone(),
            index_record_option,
            bm25_weight,
            scoring_enabled,
        ))
    }
}
impl Query for TermQuery {
    /// Generic entry point: boxes the specialized `TermWeight`.
    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
        Ok(Box::new(
            self.specialized_weight(searcher, scoring_enabled)?,
        ))
    }
    /// Reports the single term this query depends on, flagged `false`.
    fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
        terms.insert(self.term.clone(), false);
    }
}
| {
&self.term
} | identifier_body |
primitives.rs | use sdl2::render::Renderer;
use matrix::*;
/// A point in the 2-D plane.
pub struct Point2D {
    /// Horizontal coordinate.
    pub x: f32,
    /// Vertical coordinate.
    pub y: f32,
}

impl Point2D {
    /// Builds the point (x, y).
    pub fn new(x: f32, y: f32) -> Point2D {
        Point2D { x, y }
    }
}
pub trait Primitive2D {
fn to_matrix(&self) -> Matrix;
fn from_matrix(&mut self, m: &Matrix);
fn draw(&self, renderer: &Renderer);
/// Moves the primitive by (dx, dy).
///
/// The object is brought to the origin, translated, then moved back.
/// NOTE(review): for a pure translation the origin round-trip is
/// algebraically redundant, but it mirrors the pattern used by
/// `scale`/`rotate`; confirm the `Matrix` convention before simplifying.
/// Assumes matrix[2][0] / matrix[2][1] hold the position -- TODO confirm.
fn translate(&mut self, dx: f32, dy: f32) {
    let obj = self.to_matrix();
    // Current position, read from the matrix's translation slots.
    let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
    self.from_matrix(
        &(obj *
        translation_matrix(-x, -y) *
        translation_matrix(dx, dy) *
        translation_matrix(x, y))
    );
}
fn scale(&mut self, sx: f32, sy: f32) {
let obj = self.to_matrix();
let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
self.from_matrix(
&(obj *
translation_matrix(-x, -y) *
scale_matrix(sx, sy) *
| }
/// Rotates the primitive by `angle` about its own position:
/// move to the origin, rotate, move back.
/// Assumes matrix[2][0] / matrix[2][1] hold the position -- TODO confirm.
fn rotate(&mut self, angle: f32) {
    let obj = self.to_matrix();
    let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
    self.from_matrix(
        &(obj *
        translation_matrix(-x, -y) *
        rotation_matrix(angle) *
        translation_matrix(x, y))
    );
}
} | translation_matrix(x, y))
);
| random_line_split |
primitives.rs | use sdl2::render::Renderer;
use matrix::*;
pub struct | {
pub x: f32,
pub y: f32,
}
impl Point2D {
    /// Builds the point (x, y).
    pub fn new(x: f32, y: f32) -> Point2D {
        Point2D {
            x: x,
            y: y,
        }
    }
}
/// A drawable 2-D object whose pose is stored as a `Matrix`.
///
/// Implementors provide the matrix round-trip and drawing; the affine
/// transforms below come for free as default methods. Each transform is
/// applied about the object's own position (read from matrix[2][0..1] --
/// TODO confirm this convention against the `Matrix` module).
pub trait Primitive2D {
    /// Current pose as a transformation matrix.
    fn to_matrix(&self) -> Matrix;
    /// Replaces the pose with `m`.
    fn from_matrix(&mut self, m: &Matrix);
    /// Renders the primitive with the given SDL renderer.
    fn draw(&self, renderer: &Renderer);
    /// Moves the primitive by (dx, dy) via an origin round-trip.
    fn translate(&mut self, dx: f32, dy: f32) {
        let obj = self.to_matrix();
        let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
        self.from_matrix(
            &(obj *
            translation_matrix(-x, -y) *
            translation_matrix(dx, dy) *
            translation_matrix(x, y))
        );
    }
    /// Scales the primitive by (sx, sy) about its own position.
    fn scale(&mut self, sx: f32, sy: f32) {
        let obj = self.to_matrix();
        let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
        self.from_matrix(
            &(obj *
            translation_matrix(-x, -y) *
            scale_matrix(sx, sy) *
            translation_matrix(x, y))
        );
    }
    /// Rotates the primitive by `angle` about its own position.
    fn rotate(&mut self, angle: f32) {
        let obj = self.to_matrix();
        let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
        self.from_matrix(
            &(obj *
            translation_matrix(-x, -y) *
            rotation_matrix(angle) *
            translation_matrix(x, y))
        );
    }
}
| Point2D | identifier_name |
primitives.rs | use sdl2::render::Renderer;
use matrix::*;
/// A 2-D point used by the drawing primitives.
pub struct Point2D {
    /// X coordinate.
    pub x: f32,
    /// Y coordinate.
    pub y: f32,
}

impl Point2D {
    /// Constructs the point (x, y).
    pub fn new(x: f32, y: f32) -> Point2D {
        Point2D { y: y, x: x }
    }
}
pub trait Primitive2D {
fn to_matrix(&self) -> Matrix;
fn from_matrix(&mut self, m: &Matrix);
fn draw(&self, renderer: &Renderer);
/// Moves the primitive by (dx, dy) via an origin round-trip.
/// Assumes matrix[2][0] / matrix[2][1] hold the position -- TODO confirm.
fn translate(&mut self, dx: f32, dy: f32) {
    let obj = self.to_matrix();
    let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
    self.from_matrix(
        &(obj *
        translation_matrix(-x, -y) *
        translation_matrix(dx, dy) *
        translation_matrix(x, y))
    );
}
/// Scales the primitive by (sx, sy) about its own position:
/// move to the origin, scale, move back.
fn scale(&mut self, sx: f32, sy: f32) {
    let obj = self.to_matrix();
    let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
    self.from_matrix(
        &(obj *
        translation_matrix(-x, -y) *
        scale_matrix(sx, sy) *
        translation_matrix(x, y))
    );
}
fn rotate(&mut self, angle: f32) |
}
| {
let obj = self.to_matrix();
let (x, y) = (obj.matrix[2][0], obj.matrix[2][1]);
self.from_matrix(
&(obj *
translation_matrix(-x, -y) *
rotation_matrix(angle) *
translation_matrix(x, y))
);
} | identifier_body |
main.rs | #![feature(plugin)]
#![plugin(rocket_codegen)]
#![feature(custom_derive)]
extern crate chrono;
extern crate rocket;
extern crate postgres;
extern crate serde_json;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
mod webapp_config;
mod date_field;
use std::env;
use std::path::{Path, PathBuf};
use postgres::{Connection, TlsMode};
use chrono::{DateTime, UTC};
use rocket_contrib::JSON;
use rocket::response::{content, NamedFile};
#[macro_use]
extern crate lazy_static;
lazy_static!{
static ref DATABASE_URI: String = env::var("GDQ_DATABASE_URI").unwrap();
static ref CURRENT_EVENT_ID: String = env::var("GDQ_LIVE_EVENT_ID").unwrap();
}
static DONATAION_DATA_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 ORDER BY timestamp ASC";
static DONATAION_DATA_UPDATE_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 AND timestamp > $2 ORDER BY timestamp ASC";
/// One donation-tracker sample, serialized to JSON for the frontend.
#[derive(Serialize)]
struct DonationEntry {
    // Sample time (UTC).
    timestamp: DateTime<UTC>,
    // Donation count at `timestamp` (DB column `donation_count`).
    count: i32,
    // Donation total at `timestamp` (DB column `donation_total`).
    total: i32,
    // Comparison value from the `historic_total` column (2016 event).
    total_2016: i32,
}
/// JSON payload: a list of donation samples.
#[derive(Serialize)]
struct DataResponse(Vec<DonationEntry>);
/// Query-string parameters for the incremental-update endpoint.
#[derive(FromForm)]
struct DonationQuery {
    // Only entries strictly newer than this timestamp are returned.
    since: date_field::DateField,
}
#[get("/")]
/// Serves the main page: the compiled-in `index.html` template with the
/// static-asset base URL substituted in.
fn index() -> content::HTML<String> {
    let response_str = format!(include_str!("index.html"), static_base=webapp_config::get_static_base());
    content::HTML(response_str)
}
#[get("/donation_data")]
/// Returns every donation sample recorded for the configured live event.
///
/// NOTE(review): a fresh DB connection is opened per request and every
/// fallible step is `unwrap()`ed, so any DB error panics the handler.
/// Also note the constant's spelling: `DONATAION_DATA_QUERY`.
fn get_donation_data() -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let query_result = db_connection.query(DONATAION_DATA_QUERY, &[&*CURRENT_EVENT_ID]).unwrap();
    // Column order matches the SELECT: 0=id (unused), 1=timestamp, 2=count, 3=total, 4=historic total.
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/donation_data/update?<update_form>")]
/// Like `get_donation_data`, but only entries newer than `?since=...`.
fn get_donation_data_update(update_form: DonationQuery) -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let date_field::DateField(since_date) = update_form.since;
    let query_result = db_connection.query(DONATAION_DATA_UPDATE_QUERY, &[&*CURRENT_EVENT_ID, &since_date]).unwrap();
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/static/<file..>")]
fn | (file: PathBuf) -> Option<NamedFile> {
NamedFile::open(Path::new("static/").join(file)).ok()
}
fn main() {
if webapp_config::use_local_static_handler() {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update, static_files]).launch()
} else {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update]).launch()
}
} | static_files | identifier_name |
main.rs | #![feature(plugin)]
#![plugin(rocket_codegen)]
#![feature(custom_derive)]
extern crate chrono;
extern crate rocket;
extern crate postgres;
extern crate serde_json;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
mod webapp_config;
mod date_field;
use std::env;
use std::path::{Path, PathBuf};
use postgres::{Connection, TlsMode};
use chrono::{DateTime, UTC};
| use rocket::response::{content, NamedFile};
#[macro_use]
extern crate lazy_static;
lazy_static!{
static ref DATABASE_URI: String = env::var("GDQ_DATABASE_URI").unwrap();
static ref CURRENT_EVENT_ID: String = env::var("GDQ_LIVE_EVENT_ID").unwrap();
}
static DONATAION_DATA_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 ORDER BY timestamp ASC";
static DONATAION_DATA_UPDATE_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 AND timestamp > $2 ORDER BY timestamp ASC";
/// One donation-tracker sample, serialized to JSON for the frontend.
#[derive(Serialize)]
struct DonationEntry {
    // Sample time (UTC).
    timestamp: DateTime<UTC>,
    // Donation count at `timestamp`.
    count: i32,
    // Donation total at `timestamp`.
    total: i32,
    // Comparison value from the `historic_total` column (2016 event).
    total_2016: i32,
}
/// JSON payload: a list of donation samples.
#[derive(Serialize)]
struct DataResponse(Vec<DonationEntry>);
/// Query-string parameters for the incremental-update endpoint.
#[derive(FromForm)]
struct DonationQuery {
    // Only entries strictly newer than this timestamp are returned.
    since: date_field::DateField,
}
#[get("/")]
fn index() -> content::HTML<String> {
let response_str = format!(include_str!("index.html"), static_base=webapp_config::get_static_base());
content::HTML(response_str)
}
#[get("/donation_data")]
/// Returns every donation sample recorded for the configured live event.
///
/// NOTE(review): opens a fresh DB connection per request and `unwrap()`s
/// every fallible step -- any DB error panics the handler.
fn get_donation_data() -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let query_result = db_connection.query(DONATAION_DATA_QUERY, &[&*CURRENT_EVENT_ID]).unwrap();
    // Columns per the SELECT: 0=id (unused), 1=timestamp, 2=count, 3=total, 4=historic total.
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/donation_data/update?<update_form>")]
/// Like `get_donation_data`, but returns only entries newer than `?since=...`.
fn get_donation_data_update(update_form: DonationQuery) -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    // Unpack the parsed `since` timestamp from the form wrapper.
    let date_field::DateField(since_date) = update_form.since;
    let query_result = db_connection.query(DONATAION_DATA_UPDATE_QUERY, &[&*CURRENT_EVENT_ID, &since_date]).unwrap();
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/static/<file..>")]
/// Serves files from the local `static/` directory; `None` (and thus a 404)
/// when the requested file cannot be opened.
fn static_files(file: PathBuf) -> Option<NamedFile> {
    let full_path = Path::new("static/").join(file);
    match NamedFile::open(full_path) {
        Ok(named) => Some(named),
        Err(_) => None,
    }
}
fn main() {
    // The local static-file route is only mounted when the deployment config
    // asks for it (presumably development); otherwise static assets are
    // expected to be served by something else.
    if webapp_config::use_local_static_handler() {
        rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update, static_files]).launch()
    } else {
        rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update]).launch()
    }
}
main.rs | #![feature(plugin)]
#![plugin(rocket_codegen)]
#![feature(custom_derive)]
extern crate chrono;
extern crate rocket;
extern crate postgres;
extern crate serde_json;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
mod webapp_config;
mod date_field;
use std::env;
use std::path::{Path, PathBuf};
use postgres::{Connection, TlsMode};
use chrono::{DateTime, UTC};
use rocket_contrib::JSON;
use rocket::response::{content, NamedFile};
#[macro_use]
extern crate lazy_static;
lazy_static!{
static ref DATABASE_URI: String = env::var("GDQ_DATABASE_URI").unwrap();
static ref CURRENT_EVENT_ID: String = env::var("GDQ_LIVE_EVENT_ID").unwrap();
}
static DONATAION_DATA_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 ORDER BY timestamp ASC";
static DONATAION_DATA_UPDATE_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 AND timestamp > $2 ORDER BY timestamp ASC";
/// One donation-tracker sample, serialized to JSON for the frontend.
#[derive(Serialize)]
struct DonationEntry {
    // Sample time (UTC).
    timestamp: DateTime<UTC>,
    // Donation count at `timestamp`.
    count: i32,
    // Donation total at `timestamp`.
    total: i32,
    // Comparison value from the `historic_total` column (2016 event).
    total_2016: i32,
}
/// JSON payload: a list of donation samples.
#[derive(Serialize)]
struct DataResponse(Vec<DonationEntry>);
/// Query-string parameters for the incremental-update endpoint.
#[derive(FromForm)]
struct DonationQuery {
    // Only entries strictly newer than this timestamp are returned.
    since: date_field::DateField,
}
#[get("/")]
/// Serves the main page: the compiled-in `index.html` template with the
/// static-asset base URL substituted in.
fn index() -> content::HTML<String> {
    let response_str = format!(include_str!("index.html"), static_base=webapp_config::get_static_base());
    content::HTML(response_str)
}
#[get("/donation_data")]
/// Returns every donation sample for the configured live event.
/// NOTE(review): per-request DB connection plus `unwrap()`s -- any DB error
/// panics the handler.
fn get_donation_data() -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let query_result = db_connection.query(DONATAION_DATA_QUERY, &[&*CURRENT_EVENT_ID]).unwrap();
    // Columns per the SELECT: 0=id (unused), 1=timestamp, 2=count, 3=total, 4=historic total.
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/donation_data/update?<update_form>")]
/// Like `get_donation_data`, but only entries newer than `?since=...`.
fn get_donation_data_update(update_form: DonationQuery) -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let date_field::DateField(since_date) = update_form.since;
    let query_result = db_connection.query(DONATAION_DATA_UPDATE_QUERY, &[&*CURRENT_EVENT_ID, &since_date]).unwrap();
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/static/<file..>")]
/// Serves files from the local `static/` directory; `None` means 404.
fn static_files(file: PathBuf) -> Option<NamedFile> {
    NamedFile::open(Path::new("static/").join(file)).ok()
}
fn main() | {
if webapp_config::use_local_static_handler() {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update, static_files]).launch()
} else {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update]).launch()
}
} | identifier_body |
|
main.rs | #![feature(plugin)]
#![plugin(rocket_codegen)]
#![feature(custom_derive)]
extern crate chrono;
extern crate rocket;
extern crate postgres;
extern crate serde_json;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
mod webapp_config;
mod date_field;
use std::env;
use std::path::{Path, PathBuf};
use postgres::{Connection, TlsMode};
use chrono::{DateTime, UTC};
use rocket_contrib::JSON;
use rocket::response::{content, NamedFile};
#[macro_use]
extern crate lazy_static;
lazy_static!{
static ref DATABASE_URI: String = env::var("GDQ_DATABASE_URI").unwrap();
static ref CURRENT_EVENT_ID: String = env::var("GDQ_LIVE_EVENT_ID").unwrap();
}
static DONATAION_DATA_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 ORDER BY timestamp ASC";
static DONATAION_DATA_UPDATE_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 AND timestamp > $2 ORDER BY timestamp ASC";
/// One donation-tracker sample, serialized to JSON for the frontend.
#[derive(Serialize)]
struct DonationEntry {
    // Sample time (UTC).
    timestamp: DateTime<UTC>,
    // Donation count at `timestamp`.
    count: i32,
    // Donation total at `timestamp`.
    total: i32,
    // Comparison value from the `historic_total` column (2016 event).
    total_2016: i32,
}
/// JSON payload: a list of donation samples.
#[derive(Serialize)]
struct DataResponse(Vec<DonationEntry>);
/// Query-string parameters for the incremental-update endpoint.
#[derive(FromForm)]
struct DonationQuery {
    // Only entries strictly newer than this timestamp are returned.
    since: date_field::DateField,
}
#[get("/")]
/// Serves the main page: the compiled-in `index.html` template with the
/// static-asset base URL substituted in.
fn index() -> content::HTML<String> {
    let response_str = format!(include_str!("index.html"), static_base=webapp_config::get_static_base());
    content::HTML(response_str)
}
#[get("/donation_data")]
/// Returns every donation sample for the configured live event.
/// NOTE(review): per-request DB connection plus `unwrap()`s -- any DB error
/// panics the handler.
fn get_donation_data() -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let query_result = db_connection.query(DONATAION_DATA_QUERY, &[&*CURRENT_EVENT_ID]).unwrap();
    // Columns per the SELECT: 0=id (unused), 1=timestamp, 2=count, 3=total, 4=historic total.
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/donation_data/update?<update_form>")]
/// Like `get_donation_data`, but only entries newer than `?since=...`.
fn get_donation_data_update(update_form: DonationQuery) -> JSON<DataResponse> {
    let database_uri: &str = &*DATABASE_URI;
    let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
    let date_field::DateField(since_date) = update_form.since;
    let query_result = db_connection.query(DONATAION_DATA_UPDATE_QUERY, &[&*CURRENT_EVENT_ID, &since_date]).unwrap();
    let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
    JSON(DataResponse(result))
}
#[get("/static/<file..>")]
/// Serves files from the local `static/` directory; `None` means 404.
fn static_files(file: PathBuf) -> Option<NamedFile> {
    NamedFile::open(Path::new("static/").join(file)).ok()
}
fn main() {
if webapp_config::use_local_static_handler() | else {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update]).launch()
}
} | {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update, static_files]).launch()
} | conditional_block |
plugin_crate_outlive_expansion_phase.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(plugin_registrar)]
extern crate rustc; |
use std::any::Any;
use std::cell::RefCell;
use rustc::plugin::Registry;
struct Foo {
foo: int
}
impl Drop for Foo {
fn drop(&mut self) {}
}
#[plugin_registrar]
pub fn registrar(_: &mut Registry) {
thread_local!(static FOO: RefCell<Option<Box<Any+Send>>> = RefCell::new(None));
FOO.with(|s| *s.borrow_mut() = Some(box Foo { foo: 10 } as Box<Any+Send>));
} | random_line_split |
|
plugin_crate_outlive_expansion_phase.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(plugin_registrar)]
extern crate rustc;
use std::any::Any;
use std::cell::RefCell;
use rustc::plugin::Registry;
struct | {
foo: int
}
impl Drop for Foo {
fn drop(&mut self) {}
}
#[plugin_registrar]
pub fn registrar(_: &mut Registry) {
thread_local!(static FOO: RefCell<Option<Box<Any+Send>>> = RefCell::new(None));
FOO.with(|s| *s.borrow_mut() = Some(box Foo { foo: 10 } as Box<Any+Send>));
}
| Foo | identifier_name |
imports.rs | use super::namespaces;
use crate::{library::Library, nameutil::crate_name, version::Version};
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::btree_map::BTreeMap;
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};
use std::vec::IntoIter;
fn is_first_char_up(s: &str) -> bool {
s.chars().next().unwrap().is_uppercase()
}
fn check_up_eq(a: &str, b: &str) -> Ordering {
let is_a_up = is_first_char_up(a);
let is_b_up = is_first_char_up(b);
if is_a_up!= is_b_up {
if is_a_up {
return Ordering::Greater;
}
return Ordering::Less;
}
Ordering::Equal
}
/// This function is used by the `Imports` type to generate output like `cargo fmt` would.
///
/// For example:
///
/// ```text
/// use gdk; // lowercases come first.
/// use Window;
///
/// use gdk::foo; // lowercases come first here as well.
/// use gdk::Foo;
/// ```
#[allow(clippy::trivially_copy_pass_by_ref)] // because of the way this is used, the refs are needed
fn compare_imports(a: &(&String, &ImportConditions), b: &(&String, &ImportConditions)) -> Ordering {
let s = check_up_eq(a.0, b.0);
if s!= Ordering::Equal {
return s;
}
let mut a = a.0.split("::");
let mut b = b.0.split("::");
loop {
match (a.next(), b.next()) {
(Some(a), Some(b)) => {
let s = check_up_eq(a, b);
if s!= Ordering::Equal {
break s;
}
let s = a.partial_cmp(b).unwrap();
if s!= Ordering::Equal {
break s;
}
}
(Some(_), None) => break Ordering::Greater,
(None, Some(_)) => break Ordering::Less,
(None, None) => break Ordering::Equal,
}
}
}
/// Provides assistance in generating use declarations.
///
/// It takes into account that use declaration referring to names within the
/// same crate will look differently. It also avoids generating spurious
/// declarations referring to names from within the same module as the one we
/// are generating code for.
#[derive(Clone, Debug, Default)]
pub struct Imports {
/// Name of the current crate.
crate_name: String,
/// Names defined within current module. It doesn't need use declaration.
defined: HashSet<String>,
defaults: ImportConditions,
map: BTreeMap<String, ImportConditions>,
}
impl Imports {
pub fn new(gir: &Library) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: HashSet::new(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
pub fn with_defined(gir: &Library, name: &str) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: std::iter::once(name.to_owned()).collect(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
#[must_use = "ImportsWithDefault must live while defaults are needed"]
pub fn with_defaults(
&mut self,
version: Option<Version>,
constraint: &Option<String>,
) -> ImportsWithDefault<'_> {
let constraints = if let Some(constraint) = constraint {
vec![constraint.clone()]
} else {
vec![]
};
self.defaults = ImportConditions {
version,
constraints,
};
ImportsWithDefault::new(self)
}
fn reset_defaults(&mut self) {
self.defaults.clear();
}
/// The goals of this function is to discard unwanted imports like "crate". It
/// also extends the checks in case you are implementing "X". For example, you don't want to
/// import "X" or "crate::X" in this case.
fn common_checks(&self, name: &str) -> bool |
/// Declares that `name` is defined in scope
///
/// Removes existing imports from `self.map` and marks `name` as
/// available to counter future import "requests".
pub fn add_defined(&mut self, name: &str) {
if self.defined.insert(name.to_owned()) {
self.map.remove(name);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
/// Uses defaults.
pub fn add(&mut self, name: &str) {
if!self.common_checks(name) {
return;
}
if let Some(mut name) = self.strip_crate_name(name) {
if name == "xlib" {
name = if self.crate_name == "gdk_x11" {
// Dirty little hack to allow to have correct import for GDKX11.
Cow::Borrowed("x11::xlib")
} else {
// gtk has a module named "xlib" which is why this hack is needed too.
Cow::Borrowed("crate::xlib")
};
}
let defaults = &self.defaults;
let entry = self
.map
.entry(name.into_owned())
.or_insert_with(|| defaults.clone());
entry.update_version(self.defaults.version);
entry.update_constraints(&self.defaults.constraints);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_version(&mut self, name: &str, version: Option<Version>) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
entry.update_version(version);
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
}
}
/// Declares that name should be available through its last path component and provides
/// an optional feature constraint.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_constraint(
&mut self,
name: &str,
version: Option<Version>,
constraint: Option<&str>,
) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = if let Some(constraint) = constraint {
let constraint = String::from(constraint);
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: vec![constraint.clone()],
});
entry.add_constraint(constraint);
entry
} else {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
entry
};
entry.update_version(version);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type(&mut self, used_type: &str) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add(&used_type[2..]);
} else {
self.add(&used_type[..i]);
}
} else {
self.add(&format!("crate::{}", used_type));
}
}
pub fn add_used_types(&mut self, used_types: &[String]) {
for s in used_types {
self.add_used_type(s);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type_with_version(&mut self, used_type: &str, version: Option<Version>) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add_with_version(&used_type[2..], version);
} else {
self.add_with_version(&used_type[..i], version);
}
} else {
self.add_with_version(&format!("crate::{}", used_type), version);
}
}
/// Tries to strip crate name prefix from given name.
///
/// Returns `None` if name matches crate name exactly. Otherwise returns
/// name with crate name prefix stripped or full name if there was no match.
fn strip_crate_name<'a>(&self, name: &'a str) -> Option<Cow<'a, str>> {
let prefix = &self.crate_name;
if!name.starts_with(prefix) {
return Some(Cow::Borrowed(name));
}
let rest = &name[prefix.len()..];
if rest.is_empty() {
None
} else if rest.starts_with("::") {
Some(Cow::Owned(format!("crate{}", rest)))
} else {
// It was false positive, return the whole name.
Some(Cow::Borrowed(name))
}
}
pub fn iter(&self) -> IntoIter<(&String, &ImportConditions)> {
let mut imports = self.map.iter().collect::<Vec<_>>();
imports.sort_by(compare_imports);
imports.into_iter()
}
}
pub struct ImportsWithDefault<'a> {
imports: &'a mut Imports,
}
impl<'a> ImportsWithDefault<'a> {
fn new(imports: &'a mut Imports) -> Self {
Self { imports }
}
}
impl Drop for ImportsWithDefault<'_> {
fn drop(&mut self) {
self.imports.reset_defaults()
}
}
impl Deref for ImportsWithDefault<'_> {
type Target = Imports;
fn deref(&self) -> &Self::Target {
self.imports
}
}
impl DerefMut for ImportsWithDefault<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.imports
}
}
#[derive(Clone, Debug, Default)]
pub struct ImportConditions {
pub version: Option<Version>,
pub constraints: Vec<String>,
}
impl ImportConditions {
fn clear(&mut self) {
self.version = None;
self.constraints.clear();
}
fn update_version(&mut self, version: Option<Version>) {
if version < self.version {
self.version = version;
}
}
fn add_constraint(&mut self, constraint: String) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
// Otherwise, we just check if the constraint
// is already present or not before adding it.
if!self.constraints.iter().any(|x| x == &constraint) {
self.constraints.push(constraint);
}
}
fn update_constraints(&mut self, constraints: &[String]) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
if constraints.is_empty() {
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
self.constraints.clear();
} else {
// Otherwise, we just check if the constraint
// is already present or not before adding it.
for constraint in constraints {
if!self.constraints.iter().any(|x| x == constraint) {
self.constraints.push(constraint.clone());
}
}
}
}
}
fn make_crate_name(gir: &Library) -> String {
if gir.is_glib_crate() {
crate_name("GLib")
} else {
crate_name(gir.namespace(namespaces::MAIN).name.as_str())
}
}
| {
// The ffi namespace is used directly, including it is a programmer error.
assert_ne!(name, "crate::ffi");
if (!name.contains("::") && name != "xlib") || self.defined.contains(name) {
false
} else if let Some(name) = name.strip_prefix("crate::") {
!self.defined.contains(name)
} else {
true
}
} | identifier_body |
imports.rs | use super::namespaces;
use crate::{library::Library, nameutil::crate_name, version::Version};
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::btree_map::BTreeMap;
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};
use std::vec::IntoIter;
fn is_first_char_up(s: &str) -> bool {
s.chars().next().unwrap().is_uppercase()
}
fn check_up_eq(a: &str, b: &str) -> Ordering {
let is_a_up = is_first_char_up(a);
let is_b_up = is_first_char_up(b);
if is_a_up!= is_b_up {
if is_a_up {
return Ordering::Greater;
}
return Ordering::Less;
}
Ordering::Equal
}
/// This function is used by the `Imports` type to generate output like `cargo fmt` would.
///
/// For example:
///
/// ```text
/// use gdk; // lowercases come first.
/// use Window;
///
/// use gdk::foo; // lowercases come first here as well.
/// use gdk::Foo;
/// ```
#[allow(clippy::trivially_copy_pass_by_ref)] // because of the way this is used, the refs are needed
fn compare_imports(a: &(&String, &ImportConditions), b: &(&String, &ImportConditions)) -> Ordering {
let s = check_up_eq(a.0, b.0);
if s!= Ordering::Equal {
return s;
}
let mut a = a.0.split("::");
let mut b = b.0.split("::");
loop {
match (a.next(), b.next()) {
(Some(a), Some(b)) => {
let s = check_up_eq(a, b);
if s!= Ordering::Equal {
break s;
}
let s = a.partial_cmp(b).unwrap();
if s!= Ordering::Equal {
break s;
}
}
(Some(_), None) => break Ordering::Greater,
(None, Some(_)) => break Ordering::Less,
(None, None) => break Ordering::Equal,
}
}
}
/// Provides assistance in generating use declarations.
///
/// It takes into account that use declaration referring to names within the
/// same crate will look differently. It also avoids generating spurious
/// declarations referring to names from within the same module as the one we
/// are generating code for.
#[derive(Clone, Debug, Default)]
pub struct Imports {
/// Name of the current crate.
crate_name: String,
/// Names defined within current module. It doesn't need use declaration.
defined: HashSet<String>,
defaults: ImportConditions,
map: BTreeMap<String, ImportConditions>,
}
impl Imports {
pub fn new(gir: &Library) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: HashSet::new(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
pub fn with_defined(gir: &Library, name: &str) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: std::iter::once(name.to_owned()).collect(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
#[must_use = "ImportsWithDefault must live while defaults are needed"]
pub fn with_defaults(
&mut self,
version: Option<Version>,
constraint: &Option<String>,
) -> ImportsWithDefault<'_> {
let constraints = if let Some(constraint) = constraint {
vec![constraint.clone()]
} else {
vec![]
};
self.defaults = ImportConditions {
version,
constraints,
};
ImportsWithDefault::new(self)
}
fn reset_defaults(&mut self) {
self.defaults.clear();
}
/// The goals of this function is to discard unwanted imports like "crate". It
/// also extends the checks in case you are implementing "X". For example, you don't want to
/// import "X" or "crate::X" in this case.
fn common_checks(&self, name: &str) -> bool {
// The ffi namespace is used directly, including it is a programmer error.
assert_ne!(name, "crate::ffi");
if (!name.contains("::") && name!= "xlib") || self.defined.contains(name) {
false
} else if let Some(name) = name.strip_prefix("crate::") {
!self.defined.contains(name)
} else {
true
}
}
/// Declares that `name` is defined in scope
///
/// Removes existing imports from `self.map` and marks `name` as
/// available to counter future import "requests".
pub fn add_defined(&mut self, name: &str) {
if self.defined.insert(name.to_owned()) {
self.map.remove(name);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
/// Uses defaults.
pub fn | (&mut self, name: &str) {
if!self.common_checks(name) {
return;
}
if let Some(mut name) = self.strip_crate_name(name) {
if name == "xlib" {
name = if self.crate_name == "gdk_x11" {
// Dirty little hack to allow to have correct import for GDKX11.
Cow::Borrowed("x11::xlib")
} else {
// gtk has a module named "xlib" which is why this hack is needed too.
Cow::Borrowed("crate::xlib")
};
}
let defaults = &self.defaults;
let entry = self
.map
.entry(name.into_owned())
.or_insert_with(|| defaults.clone());
entry.update_version(self.defaults.version);
entry.update_constraints(&self.defaults.constraints);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_version(&mut self, name: &str, version: Option<Version>) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
entry.update_version(version);
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
}
}
/// Declares that name should be available through its last path component and provides
/// an optional feature constraint.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_constraint(
&mut self,
name: &str,
version: Option<Version>,
constraint: Option<&str>,
) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = if let Some(constraint) = constraint {
let constraint = String::from(constraint);
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: vec![constraint.clone()],
});
entry.add_constraint(constraint);
entry
} else {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
entry
};
entry.update_version(version);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type(&mut self, used_type: &str) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add(&used_type[2..]);
} else {
self.add(&used_type[..i]);
}
} else {
self.add(&format!("crate::{}", used_type));
}
}
pub fn add_used_types(&mut self, used_types: &[String]) {
for s in used_types {
self.add_used_type(s);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type_with_version(&mut self, used_type: &str, version: Option<Version>) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add_with_version(&used_type[2..], version);
} else {
self.add_with_version(&used_type[..i], version);
}
} else {
self.add_with_version(&format!("crate::{}", used_type), version);
}
}
/// Tries to strip crate name prefix from given name.
///
/// Returns `None` if name matches crate name exactly. Otherwise returns
/// name with crate name prefix stripped or full name if there was no match.
fn strip_crate_name<'a>(&self, name: &'a str) -> Option<Cow<'a, str>> {
let prefix = &self.crate_name;
if!name.starts_with(prefix) {
return Some(Cow::Borrowed(name));
}
let rest = &name[prefix.len()..];
if rest.is_empty() {
None
} else if rest.starts_with("::") {
Some(Cow::Owned(format!("crate{}", rest)))
} else {
// It was false positive, return the whole name.
Some(Cow::Borrowed(name))
}
}
pub fn iter(&self) -> IntoIter<(&String, &ImportConditions)> {
let mut imports = self.map.iter().collect::<Vec<_>>();
imports.sort_by(compare_imports);
imports.into_iter()
}
}
pub struct ImportsWithDefault<'a> {
imports: &'a mut Imports,
}
impl<'a> ImportsWithDefault<'a> {
fn new(imports: &'a mut Imports) -> Self {
Self { imports }
}
}
impl Drop for ImportsWithDefault<'_> {
fn drop(&mut self) {
self.imports.reset_defaults()
}
}
impl Deref for ImportsWithDefault<'_> {
type Target = Imports;
fn deref(&self) -> &Self::Target {
self.imports
}
}
impl DerefMut for ImportsWithDefault<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.imports
}
}
#[derive(Clone, Debug, Default)]
pub struct ImportConditions {
pub version: Option<Version>,
pub constraints: Vec<String>,
}
impl ImportConditions {
fn clear(&mut self) {
self.version = None;
self.constraints.clear();
}
fn update_version(&mut self, version: Option<Version>) {
if version < self.version {
self.version = version;
}
}
fn add_constraint(&mut self, constraint: String) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
// Otherwise, we just check if the constraint
// is already present or not before adding it.
if!self.constraints.iter().any(|x| x == &constraint) {
self.constraints.push(constraint);
}
}
fn update_constraints(&mut self, constraints: &[String]) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
if constraints.is_empty() {
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
self.constraints.clear();
} else {
// Otherwise, we just check if the constraint
// is already present or not before adding it.
for constraint in constraints {
if!self.constraints.iter().any(|x| x == constraint) {
self.constraints.push(constraint.clone());
}
}
}
}
}
fn make_crate_name(gir: &Library) -> String {
if gir.is_glib_crate() {
crate_name("GLib")
} else {
crate_name(gir.namespace(namespaces::MAIN).name.as_str())
}
}
| add | identifier_name |
imports.rs | use super::namespaces;
use crate::{library::Library, nameutil::crate_name, version::Version};
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::btree_map::BTreeMap;
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};
use std::vec::IntoIter;
fn is_first_char_up(s: &str) -> bool {
s.chars().next().unwrap().is_uppercase()
}
fn check_up_eq(a: &str, b: &str) -> Ordering {
let is_a_up = is_first_char_up(a);
let is_b_up = is_first_char_up(b);
if is_a_up!= is_b_up {
if is_a_up {
return Ordering::Greater;
}
return Ordering::Less;
}
Ordering::Equal
}
/// This function is used by the `Imports` type to generate output like `cargo fmt` would.
///
/// For example:
///
/// ```text
/// use gdk; // lowercases come first.
/// use Window;
///
/// use gdk::foo; // lowercases come first here as well.
/// use gdk::Foo;
/// ```
#[allow(clippy::trivially_copy_pass_by_ref)] // because of the way this is used, the refs are needed
fn compare_imports(a: &(&String, &ImportConditions), b: &(&String, &ImportConditions)) -> Ordering {
let s = check_up_eq(a.0, b.0);
if s!= Ordering::Equal {
return s;
}
let mut a = a.0.split("::");
let mut b = b.0.split("::");
loop {
match (a.next(), b.next()) {
(Some(a), Some(b)) => {
let s = check_up_eq(a, b);
if s!= Ordering::Equal {
break s;
}
let s = a.partial_cmp(b).unwrap();
if s!= Ordering::Equal {
break s;
}
}
(Some(_), None) => break Ordering::Greater,
(None, Some(_)) => break Ordering::Less,
(None, None) => break Ordering::Equal,
}
}
}
/// Provides assistance in generating use declarations.
///
/// It takes into account that use declaration referring to names within the
/// same crate will look differently. It also avoids generating spurious
/// declarations referring to names from within the same module as the one we
/// are generating code for.
#[derive(Clone, Debug, Default)]
pub struct Imports {
/// Name of the current crate.
crate_name: String,
/// Names defined within current module. It doesn't need use declaration.
defined: HashSet<String>,
defaults: ImportConditions,
map: BTreeMap<String, ImportConditions>,
}
impl Imports {
pub fn new(gir: &Library) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: HashSet::new(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
pub fn with_defined(gir: &Library, name: &str) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: std::iter::once(name.to_owned()).collect(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
#[must_use = "ImportsWithDefault must live while defaults are needed"]
pub fn with_defaults(
&mut self,
version: Option<Version>,
constraint: &Option<String>,
) -> ImportsWithDefault<'_> {
let constraints = if let Some(constraint) = constraint {
vec![constraint.clone()]
} else {
vec![]
};
self.defaults = ImportConditions {
version,
constraints,
};
ImportsWithDefault::new(self)
}
fn reset_defaults(&mut self) {
self.defaults.clear();
}
/// The goals of this function is to discard unwanted imports like "crate". It
/// also extends the checks in case you are implementing "X". For example, you don't want to
/// import "X" or "crate::X" in this case.
fn common_checks(&self, name: &str) -> bool {
// The ffi namespace is used directly, including it is a programmer error.
assert_ne!(name, "crate::ffi");
if (!name.contains("::") && name!= "xlib") || self.defined.contains(name) {
false
} else if let Some(name) = name.strip_prefix("crate::") {
!self.defined.contains(name)
} else {
true
}
}
/// Declares that `name` is defined in scope
///
/// Removes existing imports from `self.map` and marks `name` as
/// available to counter future import "requests".
pub fn add_defined(&mut self, name: &str) {
if self.defined.insert(name.to_owned()) {
self.map.remove(name);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
/// Uses defaults.
pub fn add(&mut self, name: &str) {
if!self.common_checks(name) {
return;
}
if let Some(mut name) = self.strip_crate_name(name) {
if name == "xlib" {
name = if self.crate_name == "gdk_x11" {
// Dirty little hack to allow to have correct import for GDKX11.
Cow::Borrowed("x11::xlib")
} else {
// gtk has a module named "xlib" which is why this hack is needed too.
Cow::Borrowed("crate::xlib")
};
}
let defaults = &self.defaults;
let entry = self
.map
.entry(name.into_owned())
.or_insert_with(|| defaults.clone());
entry.update_version(self.defaults.version);
entry.update_constraints(&self.defaults.constraints);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_version(&mut self, name: &str, version: Option<Version>) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
entry.update_version(version);
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
}
}
/// Declares that name should be available through its last path component and provides
/// an optional feature constraint.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_constraint(
&mut self,
name: &str,
version: Option<Version>,
constraint: Option<&str>,
) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = if let Some(constraint) = constraint {
let constraint = String::from(constraint);
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: vec![constraint.clone()],
});
entry.add_constraint(constraint);
entry
} else {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
}); | entry
};
entry.update_version(version);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type(&mut self, used_type: &str) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add(&used_type[2..]);
} else {
self.add(&used_type[..i]);
}
} else {
self.add(&format!("crate::{}", used_type));
}
}
pub fn add_used_types(&mut self, used_types: &[String]) {
for s in used_types {
self.add_used_type(s);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type_with_version(&mut self, used_type: &str, version: Option<Version>) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add_with_version(&used_type[2..], version);
} else {
self.add_with_version(&used_type[..i], version);
}
} else {
self.add_with_version(&format!("crate::{}", used_type), version);
}
}
/// Tries to strip crate name prefix from given name.
///
/// Returns `None` if name matches crate name exactly. Otherwise returns
/// name with crate name prefix stripped or full name if there was no match.
fn strip_crate_name<'a>(&self, name: &'a str) -> Option<Cow<'a, str>> {
let prefix = &self.crate_name;
if!name.starts_with(prefix) {
return Some(Cow::Borrowed(name));
}
let rest = &name[prefix.len()..];
if rest.is_empty() {
None
} else if rest.starts_with("::") {
Some(Cow::Owned(format!("crate{}", rest)))
} else {
// It was false positive, return the whole name.
Some(Cow::Borrowed(name))
}
}
pub fn iter(&self) -> IntoIter<(&String, &ImportConditions)> {
let mut imports = self.map.iter().collect::<Vec<_>>();
imports.sort_by(compare_imports);
imports.into_iter()
}
}
pub struct ImportsWithDefault<'a> {
imports: &'a mut Imports,
}
impl<'a> ImportsWithDefault<'a> {
fn new(imports: &'a mut Imports) -> Self {
Self { imports }
}
}
impl Drop for ImportsWithDefault<'_> {
fn drop(&mut self) {
self.imports.reset_defaults()
}
}
impl Deref for ImportsWithDefault<'_> {
type Target = Imports;
fn deref(&self) -> &Self::Target {
self.imports
}
}
impl DerefMut for ImportsWithDefault<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.imports
}
}
#[derive(Clone, Debug, Default)]
pub struct ImportConditions {
pub version: Option<Version>,
pub constraints: Vec<String>,
}
impl ImportConditions {
fn clear(&mut self) {
self.version = None;
self.constraints.clear();
}
fn update_version(&mut self, version: Option<Version>) {
if version < self.version {
self.version = version;
}
}
fn add_constraint(&mut self, constraint: String) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
// Otherwise, we just check if the constraint
// is already present or not before adding it.
if!self.constraints.iter().any(|x| x == &constraint) {
self.constraints.push(constraint);
}
}
fn update_constraints(&mut self, constraints: &[String]) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
if constraints.is_empty() {
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
self.constraints.clear();
} else {
// Otherwise, we just check if the constraint
// is already present or not before adding it.
for constraint in constraints {
if!self.constraints.iter().any(|x| x == constraint) {
self.constraints.push(constraint.clone());
}
}
}
}
}
fn make_crate_name(gir: &Library) -> String {
if gir.is_glib_crate() {
crate_name("GLib")
} else {
crate_name(gir.namespace(namespaces::MAIN).name.as_str())
}
} | // Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear(); | random_line_split |
imports.rs | use super::namespaces;
use crate::{library::Library, nameutil::crate_name, version::Version};
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::btree_map::BTreeMap;
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};
use std::vec::IntoIter;
fn is_first_char_up(s: &str) -> bool {
s.chars().next().unwrap().is_uppercase()
}
fn check_up_eq(a: &str, b: &str) -> Ordering {
let is_a_up = is_first_char_up(a);
let is_b_up = is_first_char_up(b);
if is_a_up!= is_b_up {
if is_a_up {
return Ordering::Greater;
}
return Ordering::Less;
}
Ordering::Equal
}
/// This function is used by the `Imports` type to generate output like `cargo fmt` would.
///
/// For example:
///
/// ```text
/// use gdk; // lowercases come first.
/// use Window;
///
/// use gdk::foo; // lowercases come first here as well.
/// use gdk::Foo;
/// ```
#[allow(clippy::trivially_copy_pass_by_ref)] // because of the way this is used, the refs are needed
fn compare_imports(a: &(&String, &ImportConditions), b: &(&String, &ImportConditions)) -> Ordering {
let s = check_up_eq(a.0, b.0);
if s!= Ordering::Equal {
return s;
}
let mut a = a.0.split("::");
let mut b = b.0.split("::");
loop {
match (a.next(), b.next()) {
(Some(a), Some(b)) => {
let s = check_up_eq(a, b);
if s!= Ordering::Equal {
break s;
}
let s = a.partial_cmp(b).unwrap();
if s!= Ordering::Equal {
break s;
}
}
(Some(_), None) => break Ordering::Greater,
(None, Some(_)) => break Ordering::Less,
(None, None) => break Ordering::Equal,
}
}
}
/// Provides assistance in generating use declarations.
///
/// It takes into account that use declaration referring to names within the
/// same crate will look differently. It also avoids generating spurious
/// declarations referring to names from within the same module as the one we
/// are generating code for.
#[derive(Clone, Debug, Default)]
pub struct Imports {
/// Name of the current crate.
crate_name: String,
/// Names defined within current module. It doesn't need use declaration.
defined: HashSet<String>,
defaults: ImportConditions,
map: BTreeMap<String, ImportConditions>,
}
impl Imports {
pub fn new(gir: &Library) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: HashSet::new(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
pub fn with_defined(gir: &Library, name: &str) -> Imports {
Imports {
crate_name: make_crate_name(gir),
defined: std::iter::once(name.to_owned()).collect(),
defaults: ImportConditions::default(),
map: BTreeMap::new(),
}
}
#[must_use = "ImportsWithDefault must live while defaults are needed"]
pub fn with_defaults(
&mut self,
version: Option<Version>,
constraint: &Option<String>,
) -> ImportsWithDefault<'_> {
let constraints = if let Some(constraint) = constraint {
vec![constraint.clone()]
} else {
vec![]
};
self.defaults = ImportConditions {
version,
constraints,
};
ImportsWithDefault::new(self)
}
fn reset_defaults(&mut self) {
self.defaults.clear();
}
/// The goals of this function is to discard unwanted imports like "crate". It
/// also extends the checks in case you are implementing "X". For example, you don't want to
/// import "X" or "crate::X" in this case.
fn common_checks(&self, name: &str) -> bool {
// The ffi namespace is used directly, including it is a programmer error.
assert_ne!(name, "crate::ffi");
if (!name.contains("::") && name!= "xlib") || self.defined.contains(name) {
false
} else if let Some(name) = name.strip_prefix("crate::") {
!self.defined.contains(name)
} else {
true
}
}
/// Declares that `name` is defined in scope
///
/// Removes existing imports from `self.map` and marks `name` as
/// available to counter future import "requests".
pub fn add_defined(&mut self, name: &str) {
if self.defined.insert(name.to_owned()) {
self.map.remove(name);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
/// Uses defaults.
pub fn add(&mut self, name: &str) {
if!self.common_checks(name) {
return;
}
if let Some(mut name) = self.strip_crate_name(name) {
if name == "xlib" {
name = if self.crate_name == "gdk_x11" | else {
// gtk has a module named "xlib" which is why this hack is needed too.
Cow::Borrowed("crate::xlib")
};
}
let defaults = &self.defaults;
let entry = self
.map
.entry(name.into_owned())
.or_insert_with(|| defaults.clone());
entry.update_version(self.defaults.version);
entry.update_constraints(&self.defaults.constraints);
}
}
/// Declares that name should be available through its last path component.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_version(&mut self, name: &str, version: Option<Version>) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
entry.update_version(version);
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
}
}
/// Declares that name should be available through its last path component and provides
/// an optional feature constraint.
///
/// For example, if name is `X::Y::Z` then it will be available as `Z`.
pub fn add_with_constraint(
&mut self,
name: &str,
version: Option<Version>,
constraint: Option<&str>,
) {
if!self.common_checks(name) {
return;
}
if let Some(name) = self.strip_crate_name(name) {
let entry = if let Some(constraint) = constraint {
let constraint = String::from(constraint);
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: vec![constraint.clone()],
});
entry.add_constraint(constraint);
entry
} else {
let entry = self
.map
.entry(name.into_owned())
.or_insert(ImportConditions {
version,
constraints: Vec::new(),
});
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
entry.constraints.clear();
entry
};
entry.update_version(version);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type(&mut self, used_type: &str) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add(&used_type[2..]);
} else {
self.add(&used_type[..i]);
}
} else {
self.add(&format!("crate::{}", used_type));
}
}
pub fn add_used_types(&mut self, used_types: &[String]) {
for s in used_types {
self.add_used_type(s);
}
}
/// Declares that name should be available through its full path.
///
/// For example, if name is `X::Y` then it will be available as `X::Y`.
pub fn add_used_type_with_version(&mut self, used_type: &str, version: Option<Version>) {
if let Some(i) = used_type.find("::") {
if i == 0 {
self.add_with_version(&used_type[2..], version);
} else {
self.add_with_version(&used_type[..i], version);
}
} else {
self.add_with_version(&format!("crate::{}", used_type), version);
}
}
/// Tries to strip crate name prefix from given name.
///
/// Returns `None` if name matches crate name exactly. Otherwise returns
/// name with crate name prefix stripped or full name if there was no match.
fn strip_crate_name<'a>(&self, name: &'a str) -> Option<Cow<'a, str>> {
let prefix = &self.crate_name;
if!name.starts_with(prefix) {
return Some(Cow::Borrowed(name));
}
let rest = &name[prefix.len()..];
if rest.is_empty() {
None
} else if rest.starts_with("::") {
Some(Cow::Owned(format!("crate{}", rest)))
} else {
// It was false positive, return the whole name.
Some(Cow::Borrowed(name))
}
}
pub fn iter(&self) -> IntoIter<(&String, &ImportConditions)> {
let mut imports = self.map.iter().collect::<Vec<_>>();
imports.sort_by(compare_imports);
imports.into_iter()
}
}
pub struct ImportsWithDefault<'a> {
imports: &'a mut Imports,
}
impl<'a> ImportsWithDefault<'a> {
fn new(imports: &'a mut Imports) -> Self {
Self { imports }
}
}
impl Drop for ImportsWithDefault<'_> {
fn drop(&mut self) {
self.imports.reset_defaults()
}
}
impl Deref for ImportsWithDefault<'_> {
type Target = Imports;
fn deref(&self) -> &Self::Target {
self.imports
}
}
impl DerefMut for ImportsWithDefault<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.imports
}
}
#[derive(Clone, Debug, Default)]
pub struct ImportConditions {
pub version: Option<Version>,
pub constraints: Vec<String>,
}
impl ImportConditions {
fn clear(&mut self) {
self.version = None;
self.constraints.clear();
}
fn update_version(&mut self, version: Option<Version>) {
if version < self.version {
self.version = version;
}
}
fn add_constraint(&mut self, constraint: String) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
// Otherwise, we just check if the constraint
// is already present or not before adding it.
if!self.constraints.iter().any(|x| x == &constraint) {
self.constraints.push(constraint);
}
}
fn update_constraints(&mut self, constraints: &[String]) {
// If the import is already present but doesn't have any constraint,
// we don't want to add one.
if self.constraints.is_empty() {
return;
}
if constraints.is_empty() {
// Since there is no constraint on this import, if any constraint
// is present, we can just remove it.
self.constraints.clear();
} else {
// Otherwise, we just check if the constraint
// is already present or not before adding it.
for constraint in constraints {
if!self.constraints.iter().any(|x| x == constraint) {
self.constraints.push(constraint.clone());
}
}
}
}
}
fn make_crate_name(gir: &Library) -> String {
if gir.is_glib_crate() {
crate_name("GLib")
} else {
crate_name(gir.namespace(namespaces::MAIN).name.as_str())
}
}
| {
// Dirty little hack to allow to have correct import for GDKX11.
Cow::Borrowed("x11::xlib")
} | conditional_block |
text_run.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use font::{Font, FontHandleMethods, FontMetrics, IS_WHITESPACE_SHAPING_FLAG, RunMetrics};
use font::{ShapingOptions};
use platform::font_template::FontTemplateData;
use range::Range;
use std::cell::Cell;
use std::cmp::{Ordering, max};
use std::slice::Iter;
use std::sync::Arc;
use text::glyph::{CharIndex, GlyphStore};
use webrender_traits;
thread_local! {
static INDEX_OF_FIRST_GLYPH_RUN_CACHE: Cell<Option<(*const TextRun, CharIndex, usize)>> =
Cell::new(None)
}
/// A single "paragraph" of text in one font size and style.
#[derive(Clone, Deserialize, Serialize)]
pub struct TextRun {
/// The UTF-8 string represented by this text run.
pub text: Arc<String>,
pub font_template: Arc<FontTemplateData>,
pub actual_pt_size: Au,
pub font_metrics: FontMetrics,
pub font_key: Option<webrender_traits::FontKey>,
/// The glyph runs that make up this text run.
pub glyphs: Arc<Vec<GlyphRun>>,
pub bidi_level: u8,
}
impl Drop for TextRun {
fn drop(&mut self) {
// Invalidate the glyph run cache if it was our text run that got freed.
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((text_run_ptr, _, _)) = index_of_first_glyph_run_cache.get() { | if text_run_ptr == (self as *const TextRun) {
index_of_first_glyph_run_cache.set(None);
}
}
})
}
}
/// A single series of glyphs within a text run.
#[derive(Clone, Deserialize, Serialize)]
pub struct GlyphRun {
/// The glyphs.
pub glyph_store: Arc<GlyphStore>,
/// The range of characters in the containing run.
pub range: Range<CharIndex>,
}
pub struct NaturalWordSliceIterator<'a> {
glyphs: &'a [GlyphRun],
index: usize,
range: Range<CharIndex>,
reverse: bool,
}
impl GlyphRun {
fn compare(&self, key: &CharIndex) -> Ordering {
if *key < self.range.begin() {
Ordering::Greater
} else if *key >= self.range.end() {
Ordering::Less
} else {
Ordering::Equal
}
}
}
/// A "slice" of a text run is a series of contiguous glyphs that all belong to the same glyph
/// store. Line breaking strategies yield these.
pub struct TextRunSlice<'a> {
/// The glyph store that the glyphs in this slice belong to.
pub glyphs: &'a GlyphStore,
/// The character index that this slice begins at, relative to the start of the *text run*.
pub offset: CharIndex,
/// The range that these glyphs encompass, relative to the start of the *glyph store*.
pub range: Range<CharIndex>,
}
impl<'a> TextRunSlice<'a> {
/// Returns the range that these glyphs encompass, relative to the start of the *text run*.
#[inline]
pub fn text_run_range(&self) -> Range<CharIndex> {
let mut range = self.range;
range.shift_by(self.offset);
range
}
}
impl<'a> Iterator for NaturalWordSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let slice_glyphs;
if self.reverse {
if self.index == 0 {
return None;
}
self.index -= 1;
slice_glyphs = &self.glyphs[self.index];
} else {
if self.index >= self.glyphs.len() {
return None;
}
slice_glyphs = &self.glyphs[self.index];
self.index += 1;
}
let mut char_range = self.range.intersect(&slice_glyphs.range);
let slice_range_begin = slice_glyphs.range.begin();
char_range.shift_by(-slice_range_begin);
if!char_range.is_empty() {
Some(TextRunSlice {
glyphs: &*slice_glyphs.glyph_store,
offset: slice_range_begin,
range: char_range,
})
} else {
None
}
}
}
pub struct CharacterSliceIterator<'a> {
glyph_run: Option<&'a GlyphRun>,
glyph_run_iter: Iter<'a, GlyphRun>,
range: Range<CharIndex>,
}
impl<'a> Iterator for CharacterSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let glyph_run = match self.glyph_run {
None => return None,
Some(glyph_run) => glyph_run,
};
debug_assert!(!self.range.is_empty());
let index_to_return = self.range.begin();
self.range.adjust_by(CharIndex(1), CharIndex(-1));
if self.range.is_empty() {
// We're done.
self.glyph_run = None
} else if self.range.intersect(&glyph_run.range).is_empty() {
// Move on to the next glyph run.
self.glyph_run = self.glyph_run_iter.next();
}
let index_within_glyph_run = index_to_return - glyph_run.range.begin();
Some(TextRunSlice {
glyphs: &*glyph_run.glyph_store,
offset: glyph_run.range.begin(),
range: Range::new(index_within_glyph_run, CharIndex(1)),
})
}
}
impl<'a> TextRun {
pub fn new(font: &mut Font, text: String, options: &ShapingOptions, bidi_level: u8) -> TextRun {
let glyphs = TextRun::break_and_shape(font, &text, options);
TextRun {
text: Arc::new(text),
font_metrics: font.metrics.clone(),
font_template: font.handle.template(),
font_key: font.font_key,
actual_pt_size: font.actual_pt_size,
glyphs: Arc::new(glyphs),
bidi_level: bidi_level,
}
}
pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions)
-> Vec<GlyphRun> {
// TODO(Issue #230): do a better job. See Gecko's LineBreaker.
let mut glyphs = vec!();
let (mut byte_i, mut char_i) = (0, CharIndex(0));
let mut cur_slice_is_whitespace = false;
let (mut byte_last_boundary, mut char_last_boundary) = (0, CharIndex(0));
while byte_i < text.len() {
let range = text.char_range_at(byte_i);
let ch = range.ch;
let next = range.next;
// Slices alternate between whitespace and non-whitespace,
// representing line break opportunities.
let can_break_before = if cur_slice_is_whitespace {
match ch {
'' | '\t' | '\n' => false,
_ => {
cur_slice_is_whitespace = false;
true
}
}
} else {
match ch {
'' | '\t' | '\n' => {
cur_slice_is_whitespace = true;
true
},
_ => false
}
};
// Create a glyph store for this slice if it's nonempty.
if can_break_before && byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary.. byte_i];
debug!("creating glyph store for slice {} (ws? {}), {} - {} in run {}",
slice,!cur_slice_is_whitespace, byte_last_boundary, byte_i, text);
let mut options = *options;
if!cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
byte_last_boundary = byte_i;
char_last_boundary = char_i;
}
byte_i = next;
char_i = char_i + CharIndex(1);
}
// Create a glyph store for the final slice if it's nonempty.
if byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary..];
debug!("creating glyph store for final slice {} (ws? {}), {} - {} in run {}",
slice, cur_slice_is_whitespace, byte_last_boundary, text.len(), text);
let mut options = *options;
if cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
}
glyphs
}
pub fn ascent(&self) -> Au {
self.font_metrics.ascent
}
pub fn descent(&self) -> Au {
self.font_metrics.descent
}
pub fn advance_for_range(&self, range: &Range<CharIndex>) -> Au {
if range.is_empty() {
return Au(0)
}
// TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
self.natural_word_slices_in_range(range)
.fold(Au(0), |advance, slice| {
advance + slice.glyphs.advance_for_char_range(&slice.range)
})
}
pub fn metrics_for_range(&self, range: &Range<CharIndex>) -> RunMetrics {
RunMetrics::new(self.advance_for_range(range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range<CharIndex>)
-> RunMetrics {
RunMetrics::new(glyphs.advance_for_char_range(slice_range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn min_width_for_range(&self, range: &Range<CharIndex>) -> Au {
debug!("iterating outer range {:?}", range);
self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| {
debug!("iterated on {:?}[{:?}]", slice.offset, slice.range);
max(max_piece_width, self.advance_for_range(&slice.range))
})
}
/// Returns the index of the first glyph run containing the given character index.
fn index_of_first_glyph_run_containing(&self, index: CharIndex) -> Option<usize> {
let self_ptr = self as *const TextRun;
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((last_text_run, last_index, last_result)) =
index_of_first_glyph_run_cache.get() {
if last_text_run == self_ptr && last_index == index {
return Some(last_result)
}
}
if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) {
index_of_first_glyph_run_cache.set(Some((self_ptr, index, result)));
Some(result)
} else {
None
}
})
}
/// Returns an iterator that will iterate over all slices of glyphs that represent natural
/// words in the given range.
pub fn natural_word_slices_in_range(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: false,
}
}
/// Returns an iterator that over natural word slices in visual order (left to right or
/// right to left, depending on the bidirectional embedding level).
pub fn natural_word_slices_in_visual_order(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
// Iterate in reverse order if bidi level is RTL.
let reverse = self.bidi_level % 2 == 1;
let index = if reverse {
match self.index_of_first_glyph_run_containing(range.end() - CharIndex(1)) {
Some(i) => i + 1, // In reverse mode, index points one past the next element.
None => 0
}
} else {
match self.index_of_first_glyph_run_containing(range.begin()) {
Some(i) => i,
None => self.glyphs.len()
}
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: reverse,
}
}
/// Returns an iterator that will iterate over all slices of glyphs that represent individual
/// characters in the given range.
pub fn character_slices_in_range(&'a self, range: &Range<CharIndex>)
-> CharacterSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
let mut glyph_run_iter = self.glyphs[index..].iter();
let first_glyph_run = glyph_run_iter.next();
CharacterSliceIterator {
glyph_run: first_glyph_run,
glyph_run_iter: glyph_run_iter,
range: *range,
}
}
} | random_line_split |
|
text_run.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use font::{Font, FontHandleMethods, FontMetrics, IS_WHITESPACE_SHAPING_FLAG, RunMetrics};
use font::{ShapingOptions};
use platform::font_template::FontTemplateData;
use range::Range;
use std::cell::Cell;
use std::cmp::{Ordering, max};
use std::slice::Iter;
use std::sync::Arc;
use text::glyph::{CharIndex, GlyphStore};
use webrender_traits;
thread_local! {
static INDEX_OF_FIRST_GLYPH_RUN_CACHE: Cell<Option<(*const TextRun, CharIndex, usize)>> =
Cell::new(None)
}
/// A single "paragraph" of text in one font size and style.
#[derive(Clone, Deserialize, Serialize)]
pub struct TextRun {
/// The UTF-8 string represented by this text run.
pub text: Arc<String>,
pub font_template: Arc<FontTemplateData>,
pub actual_pt_size: Au,
pub font_metrics: FontMetrics,
pub font_key: Option<webrender_traits::FontKey>,
/// The glyph runs that make up this text run.
pub glyphs: Arc<Vec<GlyphRun>>,
pub bidi_level: u8,
}
impl Drop for TextRun {
fn drop(&mut self) {
// Invalidate the glyph run cache if it was our text run that got freed.
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((text_run_ptr, _, _)) = index_of_first_glyph_run_cache.get() {
if text_run_ptr == (self as *const TextRun) {
index_of_first_glyph_run_cache.set(None);
}
}
})
}
}
/// A single series of glyphs within a text run.
#[derive(Clone, Deserialize, Serialize)]
pub struct GlyphRun {
/// The glyphs.
pub glyph_store: Arc<GlyphStore>,
/// The range of characters in the containing run.
pub range: Range<CharIndex>,
}
pub struct NaturalWordSliceIterator<'a> {
glyphs: &'a [GlyphRun],
index: usize,
range: Range<CharIndex>,
reverse: bool,
}
impl GlyphRun {
fn compare(&self, key: &CharIndex) -> Ordering {
if *key < self.range.begin() {
Ordering::Greater
} else if *key >= self.range.end() {
Ordering::Less
} else {
Ordering::Equal
}
}
}
/// A "slice" of a text run is a series of contiguous glyphs that all belong to the same glyph
/// store. Line breaking strategies yield these.
pub struct TextRunSlice<'a> {
/// The glyph store that the glyphs in this slice belong to.
pub glyphs: &'a GlyphStore,
/// The character index that this slice begins at, relative to the start of the *text run*.
pub offset: CharIndex,
/// The range that these glyphs encompass, relative to the start of the *glyph store*.
pub range: Range<CharIndex>,
}
impl<'a> TextRunSlice<'a> {
/// Returns the range that these glyphs encompass, relative to the start of the *text run*.
#[inline]
pub fn text_run_range(&self) -> Range<CharIndex> {
let mut range = self.range;
range.shift_by(self.offset);
range
}
}
impl<'a> Iterator for NaturalWordSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let slice_glyphs;
if self.reverse {
if self.index == 0 {
return None;
}
self.index -= 1;
slice_glyphs = &self.glyphs[self.index];
} else {
if self.index >= self.glyphs.len() {
return None;
}
slice_glyphs = &self.glyphs[self.index];
self.index += 1;
}
let mut char_range = self.range.intersect(&slice_glyphs.range);
let slice_range_begin = slice_glyphs.range.begin();
char_range.shift_by(-slice_range_begin);
if!char_range.is_empty() {
Some(TextRunSlice {
glyphs: &*slice_glyphs.glyph_store,
offset: slice_range_begin,
range: char_range,
})
} else {
None
}
}
}
pub struct CharacterSliceIterator<'a> {
glyph_run: Option<&'a GlyphRun>,
glyph_run_iter: Iter<'a, GlyphRun>,
range: Range<CharIndex>,
}
impl<'a> Iterator for CharacterSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let glyph_run = match self.glyph_run {
None => return None,
Some(glyph_run) => glyph_run,
};
debug_assert!(!self.range.is_empty());
let index_to_return = self.range.begin();
self.range.adjust_by(CharIndex(1), CharIndex(-1));
if self.range.is_empty() {
// We're done.
self.glyph_run = None
} else if self.range.intersect(&glyph_run.range).is_empty() {
// Move on to the next glyph run.
self.glyph_run = self.glyph_run_iter.next();
}
let index_within_glyph_run = index_to_return - glyph_run.range.begin();
Some(TextRunSlice {
glyphs: &*glyph_run.glyph_store,
offset: glyph_run.range.begin(),
range: Range::new(index_within_glyph_run, CharIndex(1)),
})
}
}
impl<'a> TextRun {
pub fn new(font: &mut Font, text: String, options: &ShapingOptions, bidi_level: u8) -> TextRun {
let glyphs = TextRun::break_and_shape(font, &text, options);
TextRun {
text: Arc::new(text),
font_metrics: font.metrics.clone(),
font_template: font.handle.template(),
font_key: font.font_key,
actual_pt_size: font.actual_pt_size,
glyphs: Arc::new(glyphs),
bidi_level: bidi_level,
}
}
pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions)
-> Vec<GlyphRun> {
// TODO(Issue #230): do a better job. See Gecko's LineBreaker.
let mut glyphs = vec!();
let (mut byte_i, mut char_i) = (0, CharIndex(0));
let mut cur_slice_is_whitespace = false;
let (mut byte_last_boundary, mut char_last_boundary) = (0, CharIndex(0));
while byte_i < text.len() {
let range = text.char_range_at(byte_i);
let ch = range.ch;
let next = range.next;
// Slices alternate between whitespace and non-whitespace,
// representing line break opportunities.
let can_break_before = if cur_slice_is_whitespace {
match ch {
'' | '\t' | '\n' => false,
_ => {
cur_slice_is_whitespace = false;
true
}
}
} else {
match ch {
'' | '\t' | '\n' => {
cur_slice_is_whitespace = true;
true
},
_ => false
}
};
// Create a glyph store for this slice if it's nonempty.
if can_break_before && byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary.. byte_i];
debug!("creating glyph store for slice {} (ws? {}), {} - {} in run {}",
slice,!cur_slice_is_whitespace, byte_last_boundary, byte_i, text);
let mut options = *options;
if!cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
byte_last_boundary = byte_i;
char_last_boundary = char_i;
}
byte_i = next;
char_i = char_i + CharIndex(1);
}
// Create a glyph store for the final slice if it's nonempty.
if byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary..];
debug!("creating glyph store for final slice {} (ws? {}), {} - {} in run {}",
slice, cur_slice_is_whitespace, byte_last_boundary, text.len(), text);
let mut options = *options;
if cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
}
glyphs
}
pub fn ascent(&self) -> Au {
self.font_metrics.ascent
}
pub fn descent(&self) -> Au {
self.font_metrics.descent
}
pub fn advance_for_range(&self, range: &Range<CharIndex>) -> Au {
if range.is_empty() {
return Au(0)
}
// TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
self.natural_word_slices_in_range(range)
.fold(Au(0), |advance, slice| {
advance + slice.glyphs.advance_for_char_range(&slice.range)
})
}
pub fn metrics_for_range(&self, range: &Range<CharIndex>) -> RunMetrics {
RunMetrics::new(self.advance_for_range(range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range<CharIndex>)
-> RunMetrics {
RunMetrics::new(glyphs.advance_for_char_range(slice_range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn | (&self, range: &Range<CharIndex>) -> Au {
debug!("iterating outer range {:?}", range);
self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| {
debug!("iterated on {:?}[{:?}]", slice.offset, slice.range);
max(max_piece_width, self.advance_for_range(&slice.range))
})
}
/// Returns the index of the first glyph run containing the given character index.
fn index_of_first_glyph_run_containing(&self, index: CharIndex) -> Option<usize> {
let self_ptr = self as *const TextRun;
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((last_text_run, last_index, last_result)) =
index_of_first_glyph_run_cache.get() {
if last_text_run == self_ptr && last_index == index {
return Some(last_result)
}
}
if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) {
index_of_first_glyph_run_cache.set(Some((self_ptr, index, result)));
Some(result)
} else {
None
}
})
}
/// Returns an iterator that will iterate over all slices of glyphs that represent natural
/// words in the given range.
pub fn natural_word_slices_in_range(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: false,
}
}
/// Returns an iterator that over natural word slices in visual order (left to right or
/// right to left, depending on the bidirectional embedding level).
pub fn natural_word_slices_in_visual_order(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
// Iterate in reverse order if bidi level is RTL.
let reverse = self.bidi_level % 2 == 1;
let index = if reverse {
match self.index_of_first_glyph_run_containing(range.end() - CharIndex(1)) {
Some(i) => i + 1, // In reverse mode, index points one past the next element.
None => 0
}
} else {
match self.index_of_first_glyph_run_containing(range.begin()) {
Some(i) => i,
None => self.glyphs.len()
}
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: reverse,
}
}
/// Returns an iterator that will iterate over all slices of glyphs that represent individual
/// characters in the given range.
pub fn character_slices_in_range(&'a self, range: &Range<CharIndex>)
-> CharacterSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
let mut glyph_run_iter = self.glyphs[index..].iter();
let first_glyph_run = glyph_run_iter.next();
CharacterSliceIterator {
glyph_run: first_glyph_run,
glyph_run_iter: glyph_run_iter,
range: *range,
}
}
}
| min_width_for_range | identifier_name |
text_run.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use font::{Font, FontHandleMethods, FontMetrics, IS_WHITESPACE_SHAPING_FLAG, RunMetrics};
use font::{ShapingOptions};
use platform::font_template::FontTemplateData;
use range::Range;
use std::cell::Cell;
use std::cmp::{Ordering, max};
use std::slice::Iter;
use std::sync::Arc;
use text::glyph::{CharIndex, GlyphStore};
use webrender_traits;
thread_local! {
static INDEX_OF_FIRST_GLYPH_RUN_CACHE: Cell<Option<(*const TextRun, CharIndex, usize)>> =
Cell::new(None)
}
/// A single "paragraph" of text in one font size and style.
#[derive(Clone, Deserialize, Serialize)]
pub struct TextRun {
/// The UTF-8 string represented by this text run.
pub text: Arc<String>,
pub font_template: Arc<FontTemplateData>,
pub actual_pt_size: Au,
pub font_metrics: FontMetrics,
pub font_key: Option<webrender_traits::FontKey>,
/// The glyph runs that make up this text run.
pub glyphs: Arc<Vec<GlyphRun>>,
pub bidi_level: u8,
}
impl Drop for TextRun {
fn drop(&mut self) {
// Invalidate the glyph run cache if it was our text run that got freed.
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((text_run_ptr, _, _)) = index_of_first_glyph_run_cache.get() {
if text_run_ptr == (self as *const TextRun) {
index_of_first_glyph_run_cache.set(None);
}
}
})
}
}
/// A single series of glyphs within a text run.
#[derive(Clone, Deserialize, Serialize)]
pub struct GlyphRun {
/// The glyphs.
pub glyph_store: Arc<GlyphStore>,
/// The range of characters in the containing run.
pub range: Range<CharIndex>,
}
pub struct NaturalWordSliceIterator<'a> {
glyphs: &'a [GlyphRun],
index: usize,
range: Range<CharIndex>,
reverse: bool,
}
impl GlyphRun {
fn compare(&self, key: &CharIndex) -> Ordering {
if *key < self.range.begin() {
Ordering::Greater
} else if *key >= self.range.end() {
Ordering::Less
} else {
Ordering::Equal
}
}
}
/// A "slice" of a text run is a series of contiguous glyphs that all belong to the same glyph
/// store. Line breaking strategies yield these.
pub struct TextRunSlice<'a> {
/// The glyph store that the glyphs in this slice belong to.
pub glyphs: &'a GlyphStore,
/// The character index that this slice begins at, relative to the start of the *text run*.
pub offset: CharIndex,
/// The range that these glyphs encompass, relative to the start of the *glyph store*.
pub range: Range<CharIndex>,
}
impl<'a> TextRunSlice<'a> {
/// Returns the range that these glyphs encompass, relative to the start of the *text run*.
#[inline]
pub fn text_run_range(&self) -> Range<CharIndex> {
let mut range = self.range;
range.shift_by(self.offset);
range
}
}
impl<'a> Iterator for NaturalWordSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> | if!char_range.is_empty() {
Some(TextRunSlice {
glyphs: &*slice_glyphs.glyph_store,
offset: slice_range_begin,
range: char_range,
})
} else {
None
}
}
}
pub struct CharacterSliceIterator<'a> {
glyph_run: Option<&'a GlyphRun>,
glyph_run_iter: Iter<'a, GlyphRun>,
range: Range<CharIndex>,
}
impl<'a> Iterator for CharacterSliceIterator<'a> {
type Item = TextRunSlice<'a>;
// inline(always) due to the inefficient rt failures messing up inline heuristics, I think.
#[inline(always)]
fn next(&mut self) -> Option<TextRunSlice<'a>> {
let glyph_run = match self.glyph_run {
None => return None,
Some(glyph_run) => glyph_run,
};
debug_assert!(!self.range.is_empty());
let index_to_return = self.range.begin();
self.range.adjust_by(CharIndex(1), CharIndex(-1));
if self.range.is_empty() {
// We're done.
self.glyph_run = None
} else if self.range.intersect(&glyph_run.range).is_empty() {
// Move on to the next glyph run.
self.glyph_run = self.glyph_run_iter.next();
}
let index_within_glyph_run = index_to_return - glyph_run.range.begin();
Some(TextRunSlice {
glyphs: &*glyph_run.glyph_store,
offset: glyph_run.range.begin(),
range: Range::new(index_within_glyph_run, CharIndex(1)),
})
}
}
impl<'a> TextRun {
pub fn new(font: &mut Font, text: String, options: &ShapingOptions, bidi_level: u8) -> TextRun {
let glyphs = TextRun::break_and_shape(font, &text, options);
TextRun {
text: Arc::new(text),
font_metrics: font.metrics.clone(),
font_template: font.handle.template(),
font_key: font.font_key,
actual_pt_size: font.actual_pt_size,
glyphs: Arc::new(glyphs),
bidi_level: bidi_level,
}
}
pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions)
-> Vec<GlyphRun> {
// TODO(Issue #230): do a better job. See Gecko's LineBreaker.
let mut glyphs = vec!();
let (mut byte_i, mut char_i) = (0, CharIndex(0));
let mut cur_slice_is_whitespace = false;
let (mut byte_last_boundary, mut char_last_boundary) = (0, CharIndex(0));
while byte_i < text.len() {
let range = text.char_range_at(byte_i);
let ch = range.ch;
let next = range.next;
// Slices alternate between whitespace and non-whitespace,
// representing line break opportunities.
let can_break_before = if cur_slice_is_whitespace {
match ch {
'' | '\t' | '\n' => false,
_ => {
cur_slice_is_whitespace = false;
true
}
}
} else {
match ch {
'' | '\t' | '\n' => {
cur_slice_is_whitespace = true;
true
},
_ => false
}
};
// Create a glyph store for this slice if it's nonempty.
if can_break_before && byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary.. byte_i];
debug!("creating glyph store for slice {} (ws? {}), {} - {} in run {}",
slice,!cur_slice_is_whitespace, byte_last_boundary, byte_i, text);
let mut options = *options;
if!cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
byte_last_boundary = byte_i;
char_last_boundary = char_i;
}
byte_i = next;
char_i = char_i + CharIndex(1);
}
// Create a glyph store for the final slice if it's nonempty.
if byte_i > byte_last_boundary {
let slice = &text[byte_last_boundary..];
debug!("creating glyph store for final slice {} (ws? {}), {} - {} in run {}",
slice, cur_slice_is_whitespace, byte_last_boundary, text.len(), text);
let mut options = *options;
if cur_slice_is_whitespace {
options.flags.insert(IS_WHITESPACE_SHAPING_FLAG);
}
glyphs.push(GlyphRun {
glyph_store: font.shape_text(slice, &options),
range: Range::new(char_last_boundary, char_i - char_last_boundary),
});
}
glyphs
}
pub fn ascent(&self) -> Au {
self.font_metrics.ascent
}
pub fn descent(&self) -> Au {
self.font_metrics.descent
}
pub fn advance_for_range(&self, range: &Range<CharIndex>) -> Au {
if range.is_empty() {
return Au(0)
}
// TODO(Issue #199): alter advance direction for RTL
// TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text
self.natural_word_slices_in_range(range)
.fold(Au(0), |advance, slice| {
advance + slice.glyphs.advance_for_char_range(&slice.range)
})
}
pub fn metrics_for_range(&self, range: &Range<CharIndex>) -> RunMetrics {
RunMetrics::new(self.advance_for_range(range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range<CharIndex>)
-> RunMetrics {
RunMetrics::new(glyphs.advance_for_char_range(slice_range),
self.font_metrics.ascent,
self.font_metrics.descent)
}
pub fn min_width_for_range(&self, range: &Range<CharIndex>) -> Au {
debug!("iterating outer range {:?}", range);
self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| {
debug!("iterated on {:?}[{:?}]", slice.offset, slice.range);
max(max_piece_width, self.advance_for_range(&slice.range))
})
}
/// Returns the index of the first glyph run containing the given character index.
fn index_of_first_glyph_run_containing(&self, index: CharIndex) -> Option<usize> {
let self_ptr = self as *const TextRun;
INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| {
if let Some((last_text_run, last_index, last_result)) =
index_of_first_glyph_run_cache.get() {
if last_text_run == self_ptr && last_index == index {
return Some(last_result)
}
}
if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) {
index_of_first_glyph_run_cache.set(Some((self_ptr, index, result)));
Some(result)
} else {
None
}
})
}
/// Returns an iterator that will iterate over all slices of glyphs that represent natural
/// words in the given range.
pub fn natural_word_slices_in_range(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: false,
}
}
/// Returns an iterator that over natural word slices in visual order (left to right or
/// right to left, depending on the bidirectional embedding level).
pub fn natural_word_slices_in_visual_order(&'a self, range: &Range<CharIndex>)
-> NaturalWordSliceIterator<'a> {
// Iterate in reverse order if bidi level is RTL.
let reverse = self.bidi_level % 2 == 1;
let index = if reverse {
match self.index_of_first_glyph_run_containing(range.end() - CharIndex(1)) {
Some(i) => i + 1, // In reverse mode, index points one past the next element.
None => 0
}
} else {
match self.index_of_first_glyph_run_containing(range.begin()) {
Some(i) => i,
None => self.glyphs.len()
}
};
NaturalWordSliceIterator {
glyphs: &self.glyphs[..],
index: index,
range: *range,
reverse: reverse,
}
}
/// Returns an iterator that will iterate over all slices of glyphs that represent individual
/// characters in the given range.
pub fn character_slices_in_range(&'a self, range: &Range<CharIndex>)
-> CharacterSliceIterator<'a> {
let index = match self.index_of_first_glyph_run_containing(range.begin()) {
None => self.glyphs.len(),
Some(index) => index,
};
let mut glyph_run_iter = self.glyphs[index..].iter();
let first_glyph_run = glyph_run_iter.next();
CharacterSliceIterator {
glyph_run: first_glyph_run,
glyph_run_iter: glyph_run_iter,
range: *range,
}
}
}
| {
let slice_glyphs;
if self.reverse {
if self.index == 0 {
return None;
}
self.index -= 1;
slice_glyphs = &self.glyphs[self.index];
} else {
if self.index >= self.glyphs.len() {
return None;
}
slice_glyphs = &self.glyphs[self.index];
self.index += 1;
}
let mut char_range = self.range.intersect(&slice_glyphs.range);
let slice_range_begin = slice_glyphs.range.begin();
char_range.shift_by(-slice_range_begin);
| identifier_body |
test_stale_read.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::atomic::*;
use std::sync::{mpsc, Arc, Mutex};
use std::thread;
use std::time::Duration;
use kvproto::metapb::{Peer, Region};
use raft::eraftpb::MessageType;
use pd_client::PdClient;
use raftstore::store::Callback;
use test_raftstore::*;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
fn stale_read_during_splitting(right_derive: bool) {
let count = 3;
let mut cluster = new_node_cluster(0, count);
cluster.cfg.raft_store.right_derive_when_split = right_derive;
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.run();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
// Get the first region.
let region_left = cluster.get_region(key1);
let region_right = cluster.get_region(key2);
assert_eq!(region_left, region_right);
let region1 = region_left;
assert_eq!(region1.get_id(), 1);
let peer3 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 3)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), peer3.clone());
// Get the current leader.
let leader1 = peer3;
// Pause the apply worker of peer 3.
let apply_split = "apply_before_split_1_3";
fail::cfg(apply_split, "pause").unwrap();
// Split the first region.
cluster.split_region(®ion1, key2, Callback::write(Box::new(move |_| {})));
// Sleep for a while.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 2 X L F
// Note: A has the peer 3,
// L: leader, F: follower, X: peer is not ready.
thread::sleep(election_timeout);
// A key that is covered by the old region and the new region.
let stale_key = if right_derive { key1 } else { key2 };
// Get the new region.
let region2 = cluster.get_region_with(stale_key, |region| region!= ®ion1);
// Get the leader of the new region.
let leader2 = cluster.leader_of_region(region2.get_id()).unwrap();
assert_ne!(leader1.get_store_id(), leader2.get_store_id());
must_not_stale_read(
&mut cluster,
stale_key,
®ion1,
&leader1,
®ion2,
&leader2,
apply_split,
);
}
fn must_not_stale_read(
cluster: &mut Cluster<NodeCluster>,
stale_key: &[u8],
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
fp: &str,
) {
// A new value for stale_key.
let v3 = b"v3";
let mut request = new_request(
new_region.get_id(),
new_region.get_region_epoch().clone(),
vec![new_put_cf_cmd("default", stale_key, v3)],
false,
);
request.mut_header().set_peer(new_leader.clone());
cluster
.call_command_on_node(new_leader.get_store_id(), request, Duration::from_secs(5))
.unwrap();
// LocalRead.
let read_quorum = false;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// ReadIndex.
let read_quorum = true;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// Leaders can always propose read index despite split/merge.
let propose_readindex = "before_propose_readindex";
fail::cfg(propose_readindex, "return(true)").unwrap();
// Can not execute reads that are queued.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
value1.unwrap_err(); // Error::Timeout
// Remove the fp.
fail::remove(fp);
// It should read an error instead of timeout.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(5),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
assert!(value1.unwrap().get_header().has_error());
// Clean up.
fail::remove(propose_readindex);
}
fn must_not_eq_on_key(
cluster: &mut Cluster<NodeCluster>,
key: &[u8],
value: &[u8],
read_quorum: bool,
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
) {
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
let value2 = read_on_peer(
cluster,
new_leader.clone(),
new_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?} vs {:?}", key, value1, value2);
assert_eq!(must_get_value(value2.as_ref().unwrap()).as_slice(), value);
// The old leader should return an error.
assert!(
value1.as_ref().unwrap().get_header().has_error(),
"{:?}",
value1
);
}
#[test]
fn test_node_stale_read_during_splitting_left_derive() {
stale_read_during_splitting(false);
}
#[test]
fn test_node_stale_read_during_splitting_right_derive() {
stale_read_during_splitting(true);
}
#[test]
fn test_stale_read_during_merging() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
configure_for_merge(&mut cluster);
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.cfg.raft_store.right_derive_when_split = false;
cluster.cfg.raft_store.pd_heartbeat_tick_interval =
cluster.cfg.raft_store.raft_base_tick_interval;
debug!("max leader lease: {:?}", election_timeout);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
let region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 4));
pd_client.must_add_peer(region.get_id(), new_peer(3, 5));
cluster.must_split(®ion, b"k2");
let mut region1 = cluster.get_region(key1);
let mut region1000 = cluster.get_region(key2);
assert_ne!(region1, region1000);
assert_eq!(region1.get_id(), 1); // requires disable right_derive.
let leader1 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 4)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), leader1.clone());
let leader1000 = region1000
.get_peers()
.iter()
.find(|p| p.get_store_id()!= leader1.get_store_id())
.unwrap()
.clone();
cluster.must_transfer_leader(region1000.get_id(), leader1000.clone());
assert_ne!(leader1.get_store_id(), leader1000.get_store_id());
// Sleeps an election timeout. The new leader needs enough time to gather
// all followers progress, in cause the merge request is reject by the
// log gap too large (min_progress == 0).
thread::sleep(election_timeout);
// merge into
// region1000 ------------> region1
cluster.must_try_merge(region1000.get_id(), region1.get_id());
// Pause the apply workers except for the peer 4.
let apply_commit_merge = "apply_before_commit_merge_except_1_4";
fail::cfg(apply_commit_merge, "pause").unwrap();
// Wait for commit merge.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 1000 F L F
// after wait
// Region 1 L F F
// Region 1000 X L F
// Note: L: leader, F: follower, X: peer is not exist.
// TODO: what if cluster runs slow and lease is expired.
// Epoch changed by prepare merge.
// We can not use `get_region_with` to get the latest info of reigon 1000,
// because leader1 is not paused, it executes commit merge very fast
// and reports pd, its range covers region1000.
//
// region1000 does prepare merge, it increases ver and conf_ver by 1.
debug!("before merge: {:?} | {:?}", region1000, region1);
let region1000_version = region1000.get_region_epoch().get_version() + 1;
region1000
.mut_region_epoch()
.set_version(region1000_version);
let region1000_conf_version = region1000.get_region_epoch().get_conf_ver() + 1;
region1000
.mut_region_epoch()
.set_conf_ver(region1000_conf_version);
// Epoch changed by commit merge.
region1 = cluster.get_region_with(key1, |region| region!= ®ion1);
debug!("after merge: {:?} | {:?}", region1000, region1);
| stale_key,
®ion1000,
&leader1000,
®ion1,
&leader1,
apply_commit_merge,
);
}
#[test]
fn test_read_index_when_transfer_leader_2() {
let mut cluster = new_node_cluster(0, 3);
// Increase the election tick to make this test case running reliably.
configure_for_lease_read(&mut cluster, Some(50), Some(10_000));
// Stop log compaction to transfer leader with filter easier.
configure_for_request_snapshot(&mut cluster);
let max_lease = Duration::from_secs(2);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
// Add peer 2 and 3 and wait them to apply it.
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.must_put(b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
must_get_equal(&cluster.get_engine(2), b"k0", b"v0");
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
// Put and test again to ensure that peer 3 get the latest writes by message append
// instead of snapshot, so that transfer leader to peer 3 can 100% success.
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
let r1 = cluster.get_region(b"k1");
let old_leader = cluster.leader_of_region(r1.get_id()).unwrap();
// Use a macro instead of a closure to avoid any capture of local variables.
macro_rules! read_on_old_leader {
() => {{
let (tx, rx) = mpsc::sync_channel(1);
let mut read_request = new_request(
r1.get_id(),
r1.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
true, // read quorum
);
read_request.mut_header().set_peer(new_peer(1, 1));
let sim = cluster.sim.wl();
sim.async_command_on_node(
old_leader.get_id(),
read_request,
Callback::Read(Box::new(move |resp| tx.send(resp.response).unwrap())),
)
.unwrap();
rx
}};
}
// Delay all raft messages to peer 1.
let dropped_msgs = Arc::new(Mutex::new(Vec::new()));
let filter = Box::new(
RegionPacketFilter::new(r1.get_id(), old_leader.get_store_id())
.direction(Direction::Recv)
.skip(MessageType::MsgTransferLeader)
.when(Arc::new(AtomicBool::new(true)))
.reserve_dropped(Arc::clone(&dropped_msgs)),
);
cluster
.sim
.wl()
.add_recv_filter(old_leader.get_id(), filter);
let resp1 = read_on_old_leader!();
cluster.must_transfer_leader(r1.get_id(), new_peer(3, 3));
let resp2 = read_on_old_leader!();
// Unpark all pending messages and clear all filters.
let router = cluster.sim.wl().get_router(old_leader.get_id()).unwrap();
let mut reserved_msgs = Vec::new();
'LOOP: loop {
for raft_msg in std::mem::take(&mut *dropped_msgs.lock().unwrap()) {
let msg_type = raft_msg.get_message().get_msg_type();
if msg_type == MessageType::MsgHeartbeatResponse || msg_type == MessageType::MsgAppend {
reserved_msgs.push(raft_msg);
if msg_type == MessageType::MsgAppend {
break 'LOOP;
}
}
}
}
// Resume reserved messages in one batch to make sure the old leader can get read and role
// change in one `Ready`.
fail::cfg("pause_on_peer_collect_message", "pause").unwrap();
for raft_msg in reserved_msgs {
router.send_raft_message(raft_msg).unwrap();
}
fail::cfg("pause_on_peer_collect_message", "off").unwrap();
cluster.sim.wl().clear_recv_filters(old_leader.get_id());
let resp1 = resp1.recv().unwrap();
assert!(resp1.get_header().get_error().has_stale_command());
// Response 2 should contains an error.
let resp2 = resp2.recv().unwrap();
assert!(resp2.get_header().get_error().has_stale_command());
drop(cluster);
fail::remove("pause_on_peer_collect_message");
}
#[test]
fn test_read_after_peer_destroyed() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
// Disable default max peer number check.
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
// Add 2 peers.
for i in 2..4 {
pd_client.must_add_peer(r1, new_peer(i, i));
}
// Make sure peer 1 leads the region.
cluster.must_transfer_leader(r1, new_peer(1, 1));
let (key, value) = (b"k1", b"v1");
cluster.must_put(key, value);
assert_eq!(cluster.get(key), Some(value.to_vec()));
let destroy_peer_fp = "destroy_peer";
fail::cfg(destroy_peer_fp, "pause").unwrap();
pd_client.must_remove_peer(r1, new_peer(1, 1));
sleep_ms(300);
// Try writing k2 to peer3
let mut request = new_request(
r1,
cluster.pd_client.get_region_epoch(r1),
vec![new_get_cmd(b"k1")],
false,
);
request.mut_header().set_peer(new_peer(1, 1));
let (cb, rx) = make_cb(&request);
cluster
.sim
.rl()
.async_command_on_node(1, request, cb)
.unwrap();
// Wait for raftstore receives the read request.
sleep_ms(200);
fail::remove(destroy_peer_fp);
let resp = rx.recv_timeout(Duration::from_millis(200)).unwrap();
assert!(
resp.get_header().get_error().has_region_not_found(),
"{:?}",
resp
);
}
/// In previous implementation, we suspect the leader lease at the position of `leader_commit_prepare_merge`
/// failpoint when `PrepareMerge` log is committed, which is too late to prevent stale read.
#[test]
fn test_stale_read_during_merging_2() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
pd_client.disable_default_operator();
configure_for_merge(&mut cluster);
configure_for_lease_read(&mut cluster, Some(50), Some(20));
cluster.run();
for i in 0..10 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v");
}
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_peer_1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_peer_1.clone());
let right_peer_3 = find_peer(&right, 3).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_peer_3);
let leader_commit_prepare_merge_fp = "leader_commit_prepare_merge";
fail::cfg(leader_commit_prepare_merge_fp, "pause").unwrap();
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k1", b"v1");
let value = read_on_peer(
&mut cluster,
left_peer_1,
left,
b"k1",
false,
Duration::from_millis(200),
);
// The leader lease must be suspected so the local read is forbidden.
// The result should be Error::Timeout because the leader is paused at
// the position of `leader_commit_prepare_merge` failpoint.
// In previous implementation, the result is ok and the value is "v"
// but the right answer is "v1".
value.unwrap_err();
fail::remove(leader_commit_prepare_merge_fp);
} | // A key that is covered by region 1000 and region 1.
let stale_key = key2;
must_not_stale_read(
&mut cluster, | random_line_split |
test_stale_read.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::atomic::*;
use std::sync::{mpsc, Arc, Mutex};
use std::thread;
use std::time::Duration;
use kvproto::metapb::{Peer, Region};
use raft::eraftpb::MessageType;
use pd_client::PdClient;
use raftstore::store::Callback;
use test_raftstore::*;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
fn stale_read_during_splitting(right_derive: bool) {
let count = 3;
let mut cluster = new_node_cluster(0, count);
cluster.cfg.raft_store.right_derive_when_split = right_derive;
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.run();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
// Get the first region.
let region_left = cluster.get_region(key1);
let region_right = cluster.get_region(key2);
assert_eq!(region_left, region_right);
let region1 = region_left;
assert_eq!(region1.get_id(), 1);
let peer3 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 3)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), peer3.clone());
// Get the current leader.
let leader1 = peer3;
// Pause the apply worker of peer 3.
let apply_split = "apply_before_split_1_3";
fail::cfg(apply_split, "pause").unwrap();
// Split the first region.
cluster.split_region(®ion1, key2, Callback::write(Box::new(move |_| {})));
// Sleep for a while.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 2 X L F
// Note: A has the peer 3,
// L: leader, F: follower, X: peer is not ready.
thread::sleep(election_timeout);
// A key that is covered by the old region and the new region.
let stale_key = if right_derive { key1 } else { key2 };
// Get the new region.
let region2 = cluster.get_region_with(stale_key, |region| region!= ®ion1);
// Get the leader of the new region.
let leader2 = cluster.leader_of_region(region2.get_id()).unwrap();
assert_ne!(leader1.get_store_id(), leader2.get_store_id());
must_not_stale_read(
&mut cluster,
stale_key,
®ion1,
&leader1,
®ion2,
&leader2,
apply_split,
);
}
fn must_not_stale_read(
cluster: &mut Cluster<NodeCluster>,
stale_key: &[u8],
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
fp: &str,
) {
// A new value for stale_key.
let v3 = b"v3";
let mut request = new_request(
new_region.get_id(),
new_region.get_region_epoch().clone(),
vec![new_put_cf_cmd("default", stale_key, v3)],
false,
);
request.mut_header().set_peer(new_leader.clone());
cluster
.call_command_on_node(new_leader.get_store_id(), request, Duration::from_secs(5))
.unwrap();
// LocalRead.
let read_quorum = false;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// ReadIndex.
let read_quorum = true;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// Leaders can always propose read index despite split/merge.
let propose_readindex = "before_propose_readindex";
fail::cfg(propose_readindex, "return(true)").unwrap();
// Can not execute reads that are queued.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
value1.unwrap_err(); // Error::Timeout
// Remove the fp.
fail::remove(fp);
// It should read an error instead of timeout.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(5),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
assert!(value1.unwrap().get_header().has_error());
// Clean up.
fail::remove(propose_readindex);
}
fn must_not_eq_on_key(
cluster: &mut Cluster<NodeCluster>,
key: &[u8],
value: &[u8],
read_quorum: bool,
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
) | assert!(
value1.as_ref().unwrap().get_header().has_error(),
"{:?}",
value1
);
}
#[test]
fn test_node_stale_read_during_splitting_left_derive() {
stale_read_during_splitting(false);
}
#[test]
fn test_node_stale_read_during_splitting_right_derive() {
stale_read_during_splitting(true);
}
#[test]
fn test_stale_read_during_merging() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
configure_for_merge(&mut cluster);
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.cfg.raft_store.right_derive_when_split = false;
cluster.cfg.raft_store.pd_heartbeat_tick_interval =
cluster.cfg.raft_store.raft_base_tick_interval;
debug!("max leader lease: {:?}", election_timeout);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
let region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 4));
pd_client.must_add_peer(region.get_id(), new_peer(3, 5));
cluster.must_split(®ion, b"k2");
let mut region1 = cluster.get_region(key1);
let mut region1000 = cluster.get_region(key2);
assert_ne!(region1, region1000);
assert_eq!(region1.get_id(), 1); // requires disable right_derive.
let leader1 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 4)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), leader1.clone());
let leader1000 = region1000
.get_peers()
.iter()
.find(|p| p.get_store_id()!= leader1.get_store_id())
.unwrap()
.clone();
cluster.must_transfer_leader(region1000.get_id(), leader1000.clone());
assert_ne!(leader1.get_store_id(), leader1000.get_store_id());
// Sleeps an election timeout. The new leader needs enough time to gather
// all followers progress, in cause the merge request is reject by the
// log gap too large (min_progress == 0).
thread::sleep(election_timeout);
// merge into
// region1000 ------------> region1
cluster.must_try_merge(region1000.get_id(), region1.get_id());
// Pause the apply workers except for the peer 4.
let apply_commit_merge = "apply_before_commit_merge_except_1_4";
fail::cfg(apply_commit_merge, "pause").unwrap();
// Wait for commit merge.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 1000 F L F
// after wait
// Region 1 L F F
// Region 1000 X L F
// Note: L: leader, F: follower, X: peer is not exist.
// TODO: what if cluster runs slow and lease is expired.
// Epoch changed by prepare merge.
// We can not use `get_region_with` to get the latest info of reigon 1000,
// because leader1 is not paused, it executes commit merge very fast
// and reports pd, its range covers region1000.
//
// region1000 does prepare merge, it increases ver and conf_ver by 1.
debug!("before merge: {:?} | {:?}", region1000, region1);
let region1000_version = region1000.get_region_epoch().get_version() + 1;
region1000
.mut_region_epoch()
.set_version(region1000_version);
let region1000_conf_version = region1000.get_region_epoch().get_conf_ver() + 1;
region1000
.mut_region_epoch()
.set_conf_ver(region1000_conf_version);
// Epoch changed by commit merge.
region1 = cluster.get_region_with(key1, |region| region!= ®ion1);
debug!("after merge: {:?} | {:?}", region1000, region1);
// A key that is covered by region 1000 and region 1.
let stale_key = key2;
must_not_stale_read(
&mut cluster,
stale_key,
®ion1000,
&leader1000,
®ion1,
&leader1,
apply_commit_merge,
);
}
#[test]
fn test_read_index_when_transfer_leader_2() {
let mut cluster = new_node_cluster(0, 3);
// Increase the election tick to make this test case running reliably.
configure_for_lease_read(&mut cluster, Some(50), Some(10_000));
// Stop log compaction to transfer leader with filter easier.
configure_for_request_snapshot(&mut cluster);
let max_lease = Duration::from_secs(2);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
// Add peer 2 and 3 and wait them to apply it.
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.must_put(b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
must_get_equal(&cluster.get_engine(2), b"k0", b"v0");
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
// Put and test again to ensure that peer 3 get the latest writes by message append
// instead of snapshot, so that transfer leader to peer 3 can 100% success.
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
let r1 = cluster.get_region(b"k1");
let old_leader = cluster.leader_of_region(r1.get_id()).unwrap();
// Use a macro instead of a closure to avoid any capture of local variables.
macro_rules! read_on_old_leader {
() => {{
let (tx, rx) = mpsc::sync_channel(1);
let mut read_request = new_request(
r1.get_id(),
r1.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
true, // read quorum
);
read_request.mut_header().set_peer(new_peer(1, 1));
let sim = cluster.sim.wl();
sim.async_command_on_node(
old_leader.get_id(),
read_request,
Callback::Read(Box::new(move |resp| tx.send(resp.response).unwrap())),
)
.unwrap();
rx
}};
}
// Delay all raft messages to peer 1.
let dropped_msgs = Arc::new(Mutex::new(Vec::new()));
let filter = Box::new(
RegionPacketFilter::new(r1.get_id(), old_leader.get_store_id())
.direction(Direction::Recv)
.skip(MessageType::MsgTransferLeader)
.when(Arc::new(AtomicBool::new(true)))
.reserve_dropped(Arc::clone(&dropped_msgs)),
);
cluster
.sim
.wl()
.add_recv_filter(old_leader.get_id(), filter);
let resp1 = read_on_old_leader!();
cluster.must_transfer_leader(r1.get_id(), new_peer(3, 3));
let resp2 = read_on_old_leader!();
// Unpark all pending messages and clear all filters.
let router = cluster.sim.wl().get_router(old_leader.get_id()).unwrap();
let mut reserved_msgs = Vec::new();
'LOOP: loop {
for raft_msg in std::mem::take(&mut *dropped_msgs.lock().unwrap()) {
let msg_type = raft_msg.get_message().get_msg_type();
if msg_type == MessageType::MsgHeartbeatResponse || msg_type == MessageType::MsgAppend {
reserved_msgs.push(raft_msg);
if msg_type == MessageType::MsgAppend {
break 'LOOP;
}
}
}
}
// Resume reserved messages in one batch to make sure the old leader can get read and role
// change in one `Ready`.
fail::cfg("pause_on_peer_collect_message", "pause").unwrap();
for raft_msg in reserved_msgs {
router.send_raft_message(raft_msg).unwrap();
}
fail::cfg("pause_on_peer_collect_message", "off").unwrap();
cluster.sim.wl().clear_recv_filters(old_leader.get_id());
let resp1 = resp1.recv().unwrap();
assert!(resp1.get_header().get_error().has_stale_command());
// Response 2 should contains an error.
let resp2 = resp2.recv().unwrap();
assert!(resp2.get_header().get_error().has_stale_command());
drop(cluster);
fail::remove("pause_on_peer_collect_message");
}
#[test]
fn test_read_after_peer_destroyed() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
// Disable default max peer number check.
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
// Add 2 peers.
for i in 2..4 {
pd_client.must_add_peer(r1, new_peer(i, i));
}
// Make sure peer 1 leads the region.
cluster.must_transfer_leader(r1, new_peer(1, 1));
let (key, value) = (b"k1", b"v1");
cluster.must_put(key, value);
assert_eq!(cluster.get(key), Some(value.to_vec()));
let destroy_peer_fp = "destroy_peer";
fail::cfg(destroy_peer_fp, "pause").unwrap();
pd_client.must_remove_peer(r1, new_peer(1, 1));
sleep_ms(300);
// Try writing k2 to peer3
let mut request = new_request(
r1,
cluster.pd_client.get_region_epoch(r1),
vec![new_get_cmd(b"k1")],
false,
);
request.mut_header().set_peer(new_peer(1, 1));
let (cb, rx) = make_cb(&request);
cluster
.sim
.rl()
.async_command_on_node(1, request, cb)
.unwrap();
// Wait for raftstore receives the read request.
sleep_ms(200);
fail::remove(destroy_peer_fp);
let resp = rx.recv_timeout(Duration::from_millis(200)).unwrap();
assert!(
resp.get_header().get_error().has_region_not_found(),
"{:?}",
resp
);
}
/// In previous implementation, we suspect the leader lease at the position of `leader_commit_prepare_merge`
/// failpoint when `PrepareMerge` log is committed, which is too late to prevent stale read.
#[test]
fn test_stale_read_during_merging_2() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
pd_client.disable_default_operator();
configure_for_merge(&mut cluster);
configure_for_lease_read(&mut cluster, Some(50), Some(20));
cluster.run();
for i in 0..10 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v");
}
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_peer_1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_peer_1.clone());
let right_peer_3 = find_peer(&right, 3).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_peer_3);
let leader_commit_prepare_merge_fp = "leader_commit_prepare_merge";
fail::cfg(leader_commit_prepare_merge_fp, "pause").unwrap();
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k1", b"v1");
let value = read_on_peer(
&mut cluster,
left_peer_1,
left,
b"k1",
false,
Duration::from_millis(200),
);
// The leader lease must be suspected so the local read is forbidden.
// The result should be Error::Timeout because the leader is paused at
// the position of `leader_commit_prepare_merge` failpoint.
// In previous implementation, the result is ok and the value is "v"
// but the right answer is "v1".
value.unwrap_err();
fail::remove(leader_commit_prepare_merge_fp);
}
| {
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
let value2 = read_on_peer(
cluster,
new_leader.clone(),
new_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?} vs {:?}", key, value1, value2);
assert_eq!(must_get_value(value2.as_ref().unwrap()).as_slice(), value);
// The old leader should return an error. | identifier_body |
test_stale_read.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::atomic::*;
use std::sync::{mpsc, Arc, Mutex};
use std::thread;
use std::time::Duration;
use kvproto::metapb::{Peer, Region};
use raft::eraftpb::MessageType;
use pd_client::PdClient;
use raftstore::store::Callback;
use test_raftstore::*;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
fn stale_read_during_splitting(right_derive: bool) {
let count = 3;
let mut cluster = new_node_cluster(0, count);
cluster.cfg.raft_store.right_derive_when_split = right_derive;
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.run();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
// Get the first region.
let region_left = cluster.get_region(key1);
let region_right = cluster.get_region(key2);
assert_eq!(region_left, region_right);
let region1 = region_left;
assert_eq!(region1.get_id(), 1);
let peer3 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 3)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), peer3.clone());
// Get the current leader.
let leader1 = peer3;
// Pause the apply worker of peer 3.
let apply_split = "apply_before_split_1_3";
fail::cfg(apply_split, "pause").unwrap();
// Split the first region.
cluster.split_region(®ion1, key2, Callback::write(Box::new(move |_| {})));
// Sleep for a while.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 2 X L F
// Note: A has the peer 3,
// L: leader, F: follower, X: peer is not ready.
thread::sleep(election_timeout);
// A key that is covered by the old region and the new region.
let stale_key = if right_derive { key1 } else { key2 };
// Get the new region.
let region2 = cluster.get_region_with(stale_key, |region| region!= ®ion1);
// Get the leader of the new region.
let leader2 = cluster.leader_of_region(region2.get_id()).unwrap();
assert_ne!(leader1.get_store_id(), leader2.get_store_id());
must_not_stale_read(
&mut cluster,
stale_key,
®ion1,
&leader1,
®ion2,
&leader2,
apply_split,
);
}
fn must_not_stale_read(
cluster: &mut Cluster<NodeCluster>,
stale_key: &[u8],
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
fp: &str,
) {
// A new value for stale_key.
let v3 = b"v3";
let mut request = new_request(
new_region.get_id(),
new_region.get_region_epoch().clone(),
vec![new_put_cf_cmd("default", stale_key, v3)],
false,
);
request.mut_header().set_peer(new_leader.clone());
cluster
.call_command_on_node(new_leader.get_store_id(), request, Duration::from_secs(5))
.unwrap();
// LocalRead.
let read_quorum = false;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// ReadIndex.
let read_quorum = true;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// Leaders can always propose read index despite split/merge.
let propose_readindex = "before_propose_readindex";
fail::cfg(propose_readindex, "return(true)").unwrap();
// Can not execute reads that are queued.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
value1.unwrap_err(); // Error::Timeout
// Remove the fp.
fail::remove(fp);
// It should read an error instead of timeout.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(5),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
assert!(value1.unwrap().get_header().has_error());
// Clean up.
fail::remove(propose_readindex);
}
fn must_not_eq_on_key(
cluster: &mut Cluster<NodeCluster>,
key: &[u8],
value: &[u8],
read_quorum: bool,
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
) {
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
let value2 = read_on_peer(
cluster,
new_leader.clone(),
new_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?} vs {:?}", key, value1, value2);
assert_eq!(must_get_value(value2.as_ref().unwrap()).as_slice(), value);
// The old leader should return an error.
assert!(
value1.as_ref().unwrap().get_header().has_error(),
"{:?}",
value1
);
}
#[test]
fn test_node_stale_read_during_splitting_left_derive() {
stale_read_during_splitting(false);
}
#[test]
fn test_node_stale_read_during_splitting_right_derive() {
stale_read_during_splitting(true);
}
#[test]
fn test_stale_read_during_merging() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
configure_for_merge(&mut cluster);
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.cfg.raft_store.right_derive_when_split = false;
cluster.cfg.raft_store.pd_heartbeat_tick_interval =
cluster.cfg.raft_store.raft_base_tick_interval;
debug!("max leader lease: {:?}", election_timeout);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
let region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 4));
pd_client.must_add_peer(region.get_id(), new_peer(3, 5));
cluster.must_split(®ion, b"k2");
let mut region1 = cluster.get_region(key1);
let mut region1000 = cluster.get_region(key2);
assert_ne!(region1, region1000);
assert_eq!(region1.get_id(), 1); // requires disable right_derive.
let leader1 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 4)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), leader1.clone());
let leader1000 = region1000
.get_peers()
.iter()
.find(|p| p.get_store_id()!= leader1.get_store_id())
.unwrap()
.clone();
cluster.must_transfer_leader(region1000.get_id(), leader1000.clone());
assert_ne!(leader1.get_store_id(), leader1000.get_store_id());
// Sleeps an election timeout. The new leader needs enough time to gather
// all followers progress, in cause the merge request is reject by the
// log gap too large (min_progress == 0).
thread::sleep(election_timeout);
// merge into
// region1000 ------------> region1
cluster.must_try_merge(region1000.get_id(), region1.get_id());
// Pause the apply workers except for the peer 4.
let apply_commit_merge = "apply_before_commit_merge_except_1_4";
fail::cfg(apply_commit_merge, "pause").unwrap();
// Wait for commit merge.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 1000 F L F
// after wait
// Region 1 L F F
// Region 1000 X L F
// Note: L: leader, F: follower, X: peer is not exist.
// TODO: what if cluster runs slow and lease is expired.
// Epoch changed by prepare merge.
// We can not use `get_region_with` to get the latest info of reigon 1000,
// because leader1 is not paused, it executes commit merge very fast
// and reports pd, its range covers region1000.
//
// region1000 does prepare merge, it increases ver and conf_ver by 1.
debug!("before merge: {:?} | {:?}", region1000, region1);
let region1000_version = region1000.get_region_epoch().get_version() + 1;
region1000
.mut_region_epoch()
.set_version(region1000_version);
let region1000_conf_version = region1000.get_region_epoch().get_conf_ver() + 1;
region1000
.mut_region_epoch()
.set_conf_ver(region1000_conf_version);
// Epoch changed by commit merge.
region1 = cluster.get_region_with(key1, |region| region!= ®ion1);
debug!("after merge: {:?} | {:?}", region1000, region1);
// A key that is covered by region 1000 and region 1.
let stale_key = key2;
must_not_stale_read(
&mut cluster,
stale_key,
®ion1000,
&leader1000,
®ion1,
&leader1,
apply_commit_merge,
);
}
#[test]
fn test_read_index_when_transfer_leader_2() {
let mut cluster = new_node_cluster(0, 3);
// Increase the election tick to make this test case running reliably.
configure_for_lease_read(&mut cluster, Some(50), Some(10_000));
// Stop log compaction to transfer leader with filter easier.
configure_for_request_snapshot(&mut cluster);
let max_lease = Duration::from_secs(2);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
// Add peer 2 and 3 and wait them to apply it.
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.must_put(b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
must_get_equal(&cluster.get_engine(2), b"k0", b"v0");
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
// Put and test again to ensure that peer 3 get the latest writes by message append
// instead of snapshot, so that transfer leader to peer 3 can 100% success.
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
let r1 = cluster.get_region(b"k1");
let old_leader = cluster.leader_of_region(r1.get_id()).unwrap();
// Use a macro instead of a closure to avoid any capture of local variables.
macro_rules! read_on_old_leader {
() => {{
let (tx, rx) = mpsc::sync_channel(1);
let mut read_request = new_request(
r1.get_id(),
r1.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
true, // read quorum
);
read_request.mut_header().set_peer(new_peer(1, 1));
let sim = cluster.sim.wl();
sim.async_command_on_node(
old_leader.get_id(),
read_request,
Callback::Read(Box::new(move |resp| tx.send(resp.response).unwrap())),
)
.unwrap();
rx
}};
}
// Delay all raft messages to peer 1.
let dropped_msgs = Arc::new(Mutex::new(Vec::new()));
let filter = Box::new(
RegionPacketFilter::new(r1.get_id(), old_leader.get_store_id())
.direction(Direction::Recv)
.skip(MessageType::MsgTransferLeader)
.when(Arc::new(AtomicBool::new(true)))
.reserve_dropped(Arc::clone(&dropped_msgs)),
);
cluster
.sim
.wl()
.add_recv_filter(old_leader.get_id(), filter);
let resp1 = read_on_old_leader!();
cluster.must_transfer_leader(r1.get_id(), new_peer(3, 3));
let resp2 = read_on_old_leader!();
// Unpark all pending messages and clear all filters.
let router = cluster.sim.wl().get_router(old_leader.get_id()).unwrap();
let mut reserved_msgs = Vec::new();
'LOOP: loop {
for raft_msg in std::mem::take(&mut *dropped_msgs.lock().unwrap()) {
let msg_type = raft_msg.get_message().get_msg_type();
if msg_type == MessageType::MsgHeartbeatResponse || msg_type == MessageType::MsgAppend |
}
}
// Resume reserved messages in one batch to make sure the old leader can get read and role
// change in one `Ready`.
fail::cfg("pause_on_peer_collect_message", "pause").unwrap();
for raft_msg in reserved_msgs {
router.send_raft_message(raft_msg).unwrap();
}
fail::cfg("pause_on_peer_collect_message", "off").unwrap();
cluster.sim.wl().clear_recv_filters(old_leader.get_id());
let resp1 = resp1.recv().unwrap();
assert!(resp1.get_header().get_error().has_stale_command());
// Response 2 should contains an error.
let resp2 = resp2.recv().unwrap();
assert!(resp2.get_header().get_error().has_stale_command());
drop(cluster);
fail::remove("pause_on_peer_collect_message");
}
#[test]
fn test_read_after_peer_destroyed() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
// Disable default max peer number check.
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
// Add 2 peers.
for i in 2..4 {
pd_client.must_add_peer(r1, new_peer(i, i));
}
// Make sure peer 1 leads the region.
cluster.must_transfer_leader(r1, new_peer(1, 1));
let (key, value) = (b"k1", b"v1");
cluster.must_put(key, value);
assert_eq!(cluster.get(key), Some(value.to_vec()));
let destroy_peer_fp = "destroy_peer";
fail::cfg(destroy_peer_fp, "pause").unwrap();
pd_client.must_remove_peer(r1, new_peer(1, 1));
sleep_ms(300);
// Try writing k2 to peer3
let mut request = new_request(
r1,
cluster.pd_client.get_region_epoch(r1),
vec![new_get_cmd(b"k1")],
false,
);
request.mut_header().set_peer(new_peer(1, 1));
let (cb, rx) = make_cb(&request);
cluster
.sim
.rl()
.async_command_on_node(1, request, cb)
.unwrap();
// Wait for raftstore receives the read request.
sleep_ms(200);
fail::remove(destroy_peer_fp);
let resp = rx.recv_timeout(Duration::from_millis(200)).unwrap();
assert!(
resp.get_header().get_error().has_region_not_found(),
"{:?}",
resp
);
}
/// In previous implementation, we suspect the leader lease at the position of `leader_commit_prepare_merge`
/// failpoint when `PrepareMerge` log is committed, which is too late to prevent stale read.
#[test]
fn test_stale_read_during_merging_2() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
pd_client.disable_default_operator();
configure_for_merge(&mut cluster);
configure_for_lease_read(&mut cluster, Some(50), Some(20));
cluster.run();
for i in 0..10 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v");
}
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_peer_1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_peer_1.clone());
let right_peer_3 = find_peer(&right, 3).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_peer_3);
let leader_commit_prepare_merge_fp = "leader_commit_prepare_merge";
fail::cfg(leader_commit_prepare_merge_fp, "pause").unwrap();
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k1", b"v1");
let value = read_on_peer(
&mut cluster,
left_peer_1,
left,
b"k1",
false,
Duration::from_millis(200),
);
// The leader lease must be suspected so the local read is forbidden.
// The result should be Error::Timeout because the leader is paused at
// the position of `leader_commit_prepare_merge` failpoint.
// In previous implementation, the result is ok and the value is "v"
// but the right answer is "v1".
value.unwrap_err();
fail::remove(leader_commit_prepare_merge_fp);
}
| {
reserved_msgs.push(raft_msg);
if msg_type == MessageType::MsgAppend {
break 'LOOP;
}
} | conditional_block |
test_stale_read.rs | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::atomic::*;
use std::sync::{mpsc, Arc, Mutex};
use std::thread;
use std::time::Duration;
use kvproto::metapb::{Peer, Region};
use raft::eraftpb::MessageType;
use pd_client::PdClient;
use raftstore::store::Callback;
use test_raftstore::*;
use tikv_util::config::*;
use tikv_util::HandyRwLock;
fn stale_read_during_splitting(right_derive: bool) {
let count = 3;
let mut cluster = new_node_cluster(0, count);
cluster.cfg.raft_store.right_derive_when_split = right_derive;
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.run();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
// Get the first region.
let region_left = cluster.get_region(key1);
let region_right = cluster.get_region(key2);
assert_eq!(region_left, region_right);
let region1 = region_left;
assert_eq!(region1.get_id(), 1);
let peer3 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 3)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), peer3.clone());
// Get the current leader.
let leader1 = peer3;
// Pause the apply worker of peer 3.
let apply_split = "apply_before_split_1_3";
fail::cfg(apply_split, "pause").unwrap();
// Split the first region.
cluster.split_region(®ion1, key2, Callback::write(Box::new(move |_| {})));
// Sleep for a while.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 2 X L F
// Note: A has the peer 3,
// L: leader, F: follower, X: peer is not ready.
thread::sleep(election_timeout);
// A key that is covered by the old region and the new region.
let stale_key = if right_derive { key1 } else { key2 };
// Get the new region.
let region2 = cluster.get_region_with(stale_key, |region| region!= ®ion1);
// Get the leader of the new region.
let leader2 = cluster.leader_of_region(region2.get_id()).unwrap();
assert_ne!(leader1.get_store_id(), leader2.get_store_id());
must_not_stale_read(
&mut cluster,
stale_key,
®ion1,
&leader1,
®ion2,
&leader2,
apply_split,
);
}
fn must_not_stale_read(
cluster: &mut Cluster<NodeCluster>,
stale_key: &[u8],
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
fp: &str,
) {
// A new value for stale_key.
let v3 = b"v3";
let mut request = new_request(
new_region.get_id(),
new_region.get_region_epoch().clone(),
vec![new_put_cf_cmd("default", stale_key, v3)],
false,
);
request.mut_header().set_peer(new_leader.clone());
cluster
.call_command_on_node(new_leader.get_store_id(), request, Duration::from_secs(5))
.unwrap();
// LocalRead.
let read_quorum = false;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// ReadIndex.
let read_quorum = true;
must_not_eq_on_key(
cluster,
stale_key,
v3,
read_quorum,
old_region,
old_leader,
new_region,
new_leader,
);
// Leaders can always propose read index despite split/merge.
let propose_readindex = "before_propose_readindex";
fail::cfg(propose_readindex, "return(true)").unwrap();
// Can not execute reads that are queued.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
value1.unwrap_err(); // Error::Timeout
// Remove the fp.
fail::remove(fp);
// It should read an error instead of timeout.
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
stale_key,
read_quorum,
Duration::from_secs(5),
);
debug!("stale_key: {:?}, {:?}", stale_key, value1);
assert!(value1.unwrap().get_header().has_error());
// Clean up.
fail::remove(propose_readindex);
}
fn | (
cluster: &mut Cluster<NodeCluster>,
key: &[u8],
value: &[u8],
read_quorum: bool,
old_region: &Region,
old_leader: &Peer,
new_region: &Region,
new_leader: &Peer,
) {
let value1 = read_on_peer(
cluster,
old_leader.clone(),
old_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
let value2 = read_on_peer(
cluster,
new_leader.clone(),
new_region.clone(),
key,
read_quorum,
Duration::from_secs(1),
);
debug!("stale_key: {:?}, {:?} vs {:?}", key, value1, value2);
assert_eq!(must_get_value(value2.as_ref().unwrap()).as_slice(), value);
// The old leader should return an error.
assert!(
value1.as_ref().unwrap().get_header().has_error(),
"{:?}",
value1
);
}
#[test]
fn test_node_stale_read_during_splitting_left_derive() {
stale_read_during_splitting(false);
}
#[test]
fn test_node_stale_read_during_splitting_right_derive() {
stale_read_during_splitting(true);
}
#[test]
fn test_stale_read_during_merging() {
let count = 3;
let mut cluster = new_node_cluster(0, count);
configure_for_merge(&mut cluster);
let election_timeout = configure_for_lease_read(&mut cluster, None, None);
cluster.cfg.raft_store.right_derive_when_split = false;
cluster.cfg.raft_store.pd_heartbeat_tick_interval =
cluster.cfg.raft_store.raft_base_tick_interval;
debug!("max leader lease: {:?}", election_timeout);
let pd_client = Arc::clone(&cluster.pd_client);
pd_client.disable_default_operator();
cluster.run_conf_change();
// Write the initial values.
let key1 = b"k1";
let v1 = b"v1";
cluster.must_put(key1, v1);
let key2 = b"k2";
let v2 = b"v2";
cluster.must_put(key2, v2);
let region = pd_client.get_region(b"k1").unwrap();
pd_client.must_add_peer(region.get_id(), new_peer(2, 4));
pd_client.must_add_peer(region.get_id(), new_peer(3, 5));
cluster.must_split(®ion, b"k2");
let mut region1 = cluster.get_region(key1);
let mut region1000 = cluster.get_region(key2);
assert_ne!(region1, region1000);
assert_eq!(region1.get_id(), 1); // requires disable right_derive.
let leader1 = region1
.get_peers()
.iter()
.find(|p| p.get_id() == 4)
.unwrap()
.clone();
cluster.must_transfer_leader(region1.get_id(), leader1.clone());
let leader1000 = region1000
.get_peers()
.iter()
.find(|p| p.get_store_id()!= leader1.get_store_id())
.unwrap()
.clone();
cluster.must_transfer_leader(region1000.get_id(), leader1000.clone());
assert_ne!(leader1.get_store_id(), leader1000.get_store_id());
// Sleeps an election timeout. The new leader needs enough time to gather
// all followers progress, in cause the merge request is reject by the
// log gap too large (min_progress == 0).
thread::sleep(election_timeout);
// merge into
// region1000 ------------> region1
cluster.must_try_merge(region1000.get_id(), region1.get_id());
// Pause the apply workers except for the peer 4.
let apply_commit_merge = "apply_before_commit_merge_except_1_4";
fail::cfg(apply_commit_merge, "pause").unwrap();
// Wait for commit merge.
// The TiKVs that have followers of the old region will elected a leader
// of the new region.
// TiKV A TiKV B TiKV C
// Region 1 L F F
// Region 1000 F L F
// after wait
// Region 1 L F F
// Region 1000 X L F
// Note: L: leader, F: follower, X: peer is not exist.
// TODO: what if cluster runs slow and lease is expired.
// Epoch changed by prepare merge.
// We can not use `get_region_with` to get the latest info of reigon 1000,
// because leader1 is not paused, it executes commit merge very fast
// and reports pd, its range covers region1000.
//
// region1000 does prepare merge, it increases ver and conf_ver by 1.
debug!("before merge: {:?} | {:?}", region1000, region1);
let region1000_version = region1000.get_region_epoch().get_version() + 1;
region1000
.mut_region_epoch()
.set_version(region1000_version);
let region1000_conf_version = region1000.get_region_epoch().get_conf_ver() + 1;
region1000
.mut_region_epoch()
.set_conf_ver(region1000_conf_version);
// Epoch changed by commit merge.
region1 = cluster.get_region_with(key1, |region| region!= ®ion1);
debug!("after merge: {:?} | {:?}", region1000, region1);
// A key that is covered by region 1000 and region 1.
let stale_key = key2;
must_not_stale_read(
&mut cluster,
stale_key,
®ion1000,
&leader1000,
®ion1,
&leader1,
apply_commit_merge,
);
}
#[test]
fn test_read_index_when_transfer_leader_2() {
let mut cluster = new_node_cluster(0, 3);
// Increase the election tick to make this test case running reliably.
configure_for_lease_read(&mut cluster, Some(50), Some(10_000));
// Stop log compaction to transfer leader with filter easier.
configure_for_request_snapshot(&mut cluster);
let max_lease = Duration::from_secs(2);
cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration(max_lease);
// Add peer 2 and 3 and wait them to apply it.
cluster.pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
cluster.must_put(b"k0", b"v0");
cluster.pd_client.must_add_peer(r1, new_peer(2, 2));
cluster.pd_client.must_add_peer(r1, new_peer(3, 3));
must_get_equal(&cluster.get_engine(2), b"k0", b"v0");
must_get_equal(&cluster.get_engine(3), b"k0", b"v0");
// Put and test again to ensure that peer 3 get the latest writes by message append
// instead of snapshot, so that transfer leader to peer 3 can 100% success.
cluster.must_put(b"k1", b"v1");
must_get_equal(&cluster.get_engine(2), b"k1", b"v1");
must_get_equal(&cluster.get_engine(3), b"k1", b"v1");
let r1 = cluster.get_region(b"k1");
let old_leader = cluster.leader_of_region(r1.get_id()).unwrap();
// Use a macro instead of a closure to avoid any capture of local variables.
macro_rules! read_on_old_leader {
() => {{
let (tx, rx) = mpsc::sync_channel(1);
let mut read_request = new_request(
r1.get_id(),
r1.get_region_epoch().clone(),
vec![new_get_cmd(b"k1")],
true, // read quorum
);
read_request.mut_header().set_peer(new_peer(1, 1));
let sim = cluster.sim.wl();
sim.async_command_on_node(
old_leader.get_id(),
read_request,
Callback::Read(Box::new(move |resp| tx.send(resp.response).unwrap())),
)
.unwrap();
rx
}};
}
// Delay all raft messages to peer 1.
let dropped_msgs = Arc::new(Mutex::new(Vec::new()));
let filter = Box::new(
RegionPacketFilter::new(r1.get_id(), old_leader.get_store_id())
.direction(Direction::Recv)
.skip(MessageType::MsgTransferLeader)
.when(Arc::new(AtomicBool::new(true)))
.reserve_dropped(Arc::clone(&dropped_msgs)),
);
cluster
.sim
.wl()
.add_recv_filter(old_leader.get_id(), filter);
let resp1 = read_on_old_leader!();
cluster.must_transfer_leader(r1.get_id(), new_peer(3, 3));
let resp2 = read_on_old_leader!();
// Unpark all pending messages and clear all filters.
let router = cluster.sim.wl().get_router(old_leader.get_id()).unwrap();
let mut reserved_msgs = Vec::new();
'LOOP: loop {
for raft_msg in std::mem::take(&mut *dropped_msgs.lock().unwrap()) {
let msg_type = raft_msg.get_message().get_msg_type();
if msg_type == MessageType::MsgHeartbeatResponse || msg_type == MessageType::MsgAppend {
reserved_msgs.push(raft_msg);
if msg_type == MessageType::MsgAppend {
break 'LOOP;
}
}
}
}
// Resume reserved messages in one batch to make sure the old leader can get read and role
// change in one `Ready`.
fail::cfg("pause_on_peer_collect_message", "pause").unwrap();
for raft_msg in reserved_msgs {
router.send_raft_message(raft_msg).unwrap();
}
fail::cfg("pause_on_peer_collect_message", "off").unwrap();
cluster.sim.wl().clear_recv_filters(old_leader.get_id());
let resp1 = resp1.recv().unwrap();
assert!(resp1.get_header().get_error().has_stale_command());
// Response 2 should contains an error.
let resp2 = resp2.recv().unwrap();
assert!(resp2.get_header().get_error().has_stale_command());
drop(cluster);
fail::remove("pause_on_peer_collect_message");
}
#[test]
fn test_read_after_peer_destroyed() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
// Disable default max peer number check.
pd_client.disable_default_operator();
let r1 = cluster.run_conf_change();
// Add 2 peers.
for i in 2..4 {
pd_client.must_add_peer(r1, new_peer(i, i));
}
// Make sure peer 1 leads the region.
cluster.must_transfer_leader(r1, new_peer(1, 1));
let (key, value) = (b"k1", b"v1");
cluster.must_put(key, value);
assert_eq!(cluster.get(key), Some(value.to_vec()));
let destroy_peer_fp = "destroy_peer";
fail::cfg(destroy_peer_fp, "pause").unwrap();
pd_client.must_remove_peer(r1, new_peer(1, 1));
sleep_ms(300);
// Try writing k2 to peer3
let mut request = new_request(
r1,
cluster.pd_client.get_region_epoch(r1),
vec![new_get_cmd(b"k1")],
false,
);
request.mut_header().set_peer(new_peer(1, 1));
let (cb, rx) = make_cb(&request);
cluster
.sim
.rl()
.async_command_on_node(1, request, cb)
.unwrap();
// Wait for raftstore receives the read request.
sleep_ms(200);
fail::remove(destroy_peer_fp);
let resp = rx.recv_timeout(Duration::from_millis(200)).unwrap();
assert!(
resp.get_header().get_error().has_region_not_found(),
"{:?}",
resp
);
}
/// In previous implementation, we suspect the leader lease at the position of `leader_commit_prepare_merge`
/// failpoint when `PrepareMerge` log is committed, which is too late to prevent stale read.
#[test]
fn test_stale_read_during_merging_2() {
let mut cluster = new_node_cluster(0, 3);
let pd_client = cluster.pd_client.clone();
pd_client.disable_default_operator();
configure_for_merge(&mut cluster);
configure_for_lease_read(&mut cluster, Some(50), Some(20));
cluster.run();
for i in 0..10 {
cluster.must_put(format!("k{}", i).as_bytes(), b"v");
}
let region = pd_client.get_region(b"k1").unwrap();
cluster.must_split(®ion, b"k2");
let left = pd_client.get_region(b"k1").unwrap();
let right = pd_client.get_region(b"k2").unwrap();
let left_peer_1 = find_peer(&left, 1).unwrap().to_owned();
cluster.must_transfer_leader(left.get_id(), left_peer_1.clone());
let right_peer_3 = find_peer(&right, 3).unwrap().to_owned();
cluster.must_transfer_leader(right.get_id(), right_peer_3);
let leader_commit_prepare_merge_fp = "leader_commit_prepare_merge";
fail::cfg(leader_commit_prepare_merge_fp, "pause").unwrap();
pd_client.must_merge(left.get_id(), right.get_id());
cluster.must_put(b"k1", b"v1");
let value = read_on_peer(
&mut cluster,
left_peer_1,
left,
b"k1",
false,
Duration::from_millis(200),
);
// The leader lease must be suspected so the local read is forbidden.
// The result should be Error::Timeout because the leader is paused at
// the position of `leader_commit_prepare_merge` failpoint.
// In previous implementation, the result is ok and the value is "v"
// but the right answer is "v1".
value.unwrap_err();
fail::remove(leader_commit_prepare_merge_fp);
}
| must_not_eq_on_key | identifier_name |
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_object_for_js_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSAutoCompartment};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface {
CallbackInterface {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str)
-> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if!JS_GetProperty(cx, obj.handle(), c_name.as_ptr(),
callable.handle_mut()) {
return Err(Error::JSFailed);
}
if!callable.ptr.is_object() ||
!IsCallable(callable.ptr.to_object()) {
return Err(Error::Type(
format!("The value of the {} property is not callable", name)));
}
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if!JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
pub struct CallSetup {
/// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly.
exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)]
pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_object_for_js_object(callback.callback());
let cx = global.r().get_cx();
unsafe { JS_BeginRequest(cx); }
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment:
RootedObject::new_with_addr(cx, exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn get_context(&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe { JS_LeaveCompartment(self.cx, self.old_compartment); }
let need_to_deal_with_exception =
self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved |
}
}
unsafe { JS_EndRequest(self.cx); }
}
}
| {
JS_RestoreFrameChain(self.cx);
} | conditional_block |
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_object_for_js_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSAutoCompartment};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface |
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str)
-> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if!JS_GetProperty(cx, obj.handle(), c_name.as_ptr(),
callable.handle_mut()) {
return Err(Error::JSFailed);
}
if!callable.ptr.is_object() ||
!IsCallable(callable.ptr.to_object()) {
return Err(Error::Type(
format!("The value of the {} property is not callable", name)));
}
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if!JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
pub struct CallSetup {
/// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly.
exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)]
pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_object_for_js_object(callback.callback());
let cx = global.r().get_cx();
unsafe { JS_BeginRequest(cx); }
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment:
RootedObject::new_with_addr(cx, exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn get_context(&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe { JS_LeaveCompartment(self.cx, self.old_compartment); }
let need_to_deal_with_exception =
self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved {
JS_RestoreFrameChain(self.cx);
}
}
}
unsafe { JS_EndRequest(self.cx); }
}
}
| {
CallbackInterface {
object: CallbackObject {
callback: Heap::default()
}
}
} | identifier_body |
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_object_for_js_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSAutoCompartment};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface {
CallbackInterface {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str)
-> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if!JS_GetProperty(cx, obj.handle(), c_name.as_ptr(),
callable.handle_mut()) {
return Err(Error::JSFailed);
}
if!callable.ptr.is_object() ||
!IsCallable(callable.ptr.to_object()) {
return Err(Error::Type(
format!("The value of the {} property is not callable", name)));
}
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if!JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
pub struct CallSetup {
/// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly.
exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)]
pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_object_for_js_object(callback.callback());
let cx = global.r().get_cx();
unsafe { JS_BeginRequest(cx); }
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment:
RootedObject::new_with_addr(cx, exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn | (&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe { JS_LeaveCompartment(self.cx, self.old_compartment); }
let need_to_deal_with_exception =
self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved {
JS_RestoreFrameChain(self.cx);
}
}
}
unsafe { JS_EndRequest(self.cx); }
}
}
| get_context | identifier_name |
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_object_for_js_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSAutoCompartment};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface {
CallbackInterface {
object: CallbackObject {
callback: Heap::default()
}
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str)
-> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if!JS_GetProperty(cx, obj.handle(), c_name.as_ptr(),
callable.handle_mut()) {
return Err(Error::JSFailed);
}
if!callable.ptr.is_object() ||
!IsCallable(callable.ptr.to_object()) {
return Err(Error::Type(
format!("The value of the {} property is not callable", name)));
}
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if!JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
pub struct CallSetup { | exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)]
pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_object_for_js_object(callback.callback());
let cx = global.r().get_cx();
unsafe { JS_BeginRequest(cx); }
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment:
RootedObject::new_with_addr(cx, exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn get_context(&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe { JS_LeaveCompartment(self.cx, self.old_compartment); }
let need_to_deal_with_exception =
self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved {
JS_RestoreFrameChain(self.cx);
}
}
}
unsafe { JS_EndRequest(self.cx); }
}
} | /// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly. | random_line_split |
klondike.rs | /*
* Copyright (c) 2018 Erik Nordstrøm <[email protected]>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
use arrayvec::ArrayVec;
use std::ops::Deref;
use cards::Card;
impl_cardstack!(StockSlot, StockSlotArray, 21); // 52 - (1 + 2 + 3 + 4 + 5 + 6 + 7) = 21
impl_cardstack!(WastePileSlot, WastePileSlotArray, 21);
impl_cardstack!(FoundationSlot, FoundationSlotArray, 13);
impl_cardstack!(TableauSlot, TableauSlotArray, 19);
pub struct Table
{
pub stock: StockSlot,
pub waste_pile: WastePileSlot,
pub foundations: [FoundationSlot; 4],
pub tableau: [TableauSlot; 7],
} | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | random_line_split |
klondike.rs | /*
* Copyright (c) 2018 Erik Nordstrøm <[email protected]>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
use arrayvec::ArrayVec;
use std::ops::Deref;
use cards::Card;
impl_cardstack!(StockSlot, StockSlotArray, 21); // 52 - (1 + 2 + 3 + 4 + 5 + 6 + 7) = 21
impl_cardstack!(WastePileSlot, WastePileSlotArray, 21);
impl_cardstack!(FoundationSlot, FoundationSlotArray, 13);
impl_cardstack!(TableauSlot, TableauSlotArray, 19);
pub struct T | {
pub stock: StockSlot,
pub waste_pile: WastePileSlot,
pub foundations: [FoundationSlot; 4],
pub tableau: [TableauSlot; 7],
}
| able
| identifier_name |
moves-based-on-type-exprs.rs | // Tests that references to move-by-default values trigger moves when
// they occur as part of various kinds of expressions.
struct Foo<A> { f: A }
fn guard(_s: ~str) -> bool {fail!()}
fn touch<A>(_a: &A) {}
fn f10() {
let x = ~"hi";
let _y = Foo { f:x };
touch(&x); //~ ERROR use of moved value: `x`
}
fn f20() {
let x = ~"hi";
let _y = (x, 3);
touch(&x); //~ ERROR use of moved value: `x`
}
fn f21() {
let x = ~[1, 2, 3];
let _y = (x[0], 3);
touch(&x);
}
fn f30(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = if cond {
x
} else {
y
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y); //~ ERROR use of moved value: `y`
}
fn f40(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
true => x,
false => y
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y); //~ ERROR use of moved value: `y`
}
fn f50(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
_ if guard(x) => 10,
true => 10,
false => 20,
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y);
}
fn f70() {
let x = ~"hi";
let _y = [x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f80() {
let x = ~"hi";
let _y = ~[x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f90() {
let x = ~"hi";
let _y = @[x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f100() {
let x = ~[~"hi"];
let _y = x[0];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn | () {
let x = ~[~"hi"];
let _y = [x[0],..1];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn f120() {
let x = ~[~"hi", ~"ho"];
x[0] <-> x[1];
touch(&x[0]);
touch(&x[1]);
}
fn main() {}
| f110 | identifier_name |
moves-based-on-type-exprs.rs | // Tests that references to move-by-default values trigger moves when
// they occur as part of various kinds of expressions.
struct Foo<A> { f: A }
fn guard(_s: ~str) -> bool {fail!()}
fn touch<A>(_a: &A) {}
fn f10() {
let x = ~"hi";
let _y = Foo { f:x };
touch(&x); //~ ERROR use of moved value: `x`
}
fn f20() {
let x = ~"hi";
let _y = (x, 3);
touch(&x); //~ ERROR use of moved value: `x`
}
fn f21() {
let x = ~[1, 2, 3];
let _y = (x[0], 3);
touch(&x);
}
fn f30(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = if cond {
x
} else {
y
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y); //~ ERROR use of moved value: `y`
}
fn f40(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
true => x,
false => y
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y); //~ ERROR use of moved value: `y`
}
fn f50(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
_ if guard(x) => 10,
true => 10,
false => 20,
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y);
}
fn f70() {
let x = ~"hi";
let _y = [x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f80() {
let x = ~"hi";
let _y = ~[x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f90() |
fn f100() {
let x = ~[~"hi"];
let _y = x[0];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn f110() {
let x = ~[~"hi"];
let _y = [x[0],..1];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn f120() {
let x = ~[~"hi", ~"ho"];
x[0] <-> x[1];
touch(&x[0]);
touch(&x[1]);
}
fn main() {}
| {
let x = ~"hi";
let _y = @[x];
touch(&x); //~ ERROR use of moved value: `x`
} | identifier_body |
moves-based-on-type-exprs.rs | // Tests that references to move-by-default values trigger moves when
// they occur as part of various kinds of expressions.
struct Foo<A> { f: A }
fn guard(_s: ~str) -> bool {fail!()}
fn touch<A>(_a: &A) {}
fn f10() {
let x = ~"hi";
let _y = Foo { f:x };
touch(&x); //~ ERROR use of moved value: `x`
}
fn f20() {
let x = ~"hi";
let _y = (x, 3);
touch(&x); //~ ERROR use of moved value: `x`
}
fn f21() {
let x = ~[1, 2, 3];
let _y = (x[0], 3);
touch(&x);
}
fn f30(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = if cond {
x
} else { | touch(&y); //~ ERROR use of moved value: `y`
}
fn f40(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
true => x,
false => y
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y); //~ ERROR use of moved value: `y`
}
fn f50(cond: bool) {
let x = ~"hi", y = ~"ho";
let _y = match cond {
_ if guard(x) => 10,
true => 10,
false => 20,
};
touch(&x); //~ ERROR use of moved value: `x`
touch(&y);
}
fn f70() {
let x = ~"hi";
let _y = [x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f80() {
let x = ~"hi";
let _y = ~[x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f90() {
let x = ~"hi";
let _y = @[x];
touch(&x); //~ ERROR use of moved value: `x`
}
fn f100() {
let x = ~[~"hi"];
let _y = x[0];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn f110() {
let x = ~[~"hi"];
let _y = [x[0],..1];
touch(&x); //~ ERROR use of partially moved value: `x`
}
fn f120() {
let x = ~[~"hi", ~"ho"];
x[0] <-> x[1];
touch(&x[0]);
touch(&x[1]);
}
fn main() {} | y
};
touch(&x); //~ ERROR use of moved value: `x` | random_line_split |
feature-gate.rs | // Test that use of structural-match traits is only permitted with a feature gate,
// and that if a feature gate is supplied, it permits the type to be
// used in a match.
// revisions: with_gate no_gate
// gate-test-structural_match
#![allow(unused)]
#![feature(rustc_attrs)]
#![cfg_attr(with_gate, feature(structural_match))]
struct Foo {
x: u32
}
const FOO: Foo = Foo { x: 0 };
#[rustc_error]
fn main() |
impl std::marker::StructuralPartialEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl std::marker::StructuralEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl PartialEq<Foo> for Foo {
fn eq(&self, other: &Self) -> bool {
self.x == other.x
}
}
impl Eq for Foo { }
| { //[with_gate]~ ERROR fatal error triggered by #[rustc_error]
let y = Foo { x: 1 };
match y {
FOO => { }
_ => { }
}
} | identifier_body |
feature-gate.rs | // Test that use of structural-match traits is only permitted with a feature gate,
// and that if a feature gate is supplied, it permits the type to be
// used in a match.
// revisions: with_gate no_gate
// gate-test-structural_match
#![allow(unused)]
#![feature(rustc_attrs)]
#![cfg_attr(with_gate, feature(structural_match))]
struct Foo {
x: u32
}
const FOO: Foo = Foo { x: 0 };
#[rustc_error]
fn | () { //[with_gate]~ ERROR fatal error triggered by #[rustc_error]
let y = Foo { x: 1 };
match y {
FOO => { }
_ => { }
}
}
impl std::marker::StructuralPartialEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl std::marker::StructuralEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl PartialEq<Foo> for Foo {
fn eq(&self, other: &Self) -> bool {
self.x == other.x
}
}
impl Eq for Foo { }
| main | identifier_name |
feature-gate.rs | // Test that use of structural-match traits is only permitted with a feature gate,
// and that if a feature gate is supplied, it permits the type to be
// used in a match.
// revisions: with_gate no_gate
// gate-test-structural_match
#![allow(unused)]
#![feature(rustc_attrs)]
#![cfg_attr(with_gate, feature(structural_match))]
struct Foo {
x: u32
}
const FOO: Foo = Foo { x: 0 };
#[rustc_error]
fn main() { //[with_gate]~ ERROR fatal error triggered by #[rustc_error]
let y = Foo { x: 1 };
match y {
FOO => { }
_ => { }
}
}
impl std::marker::StructuralPartialEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl std::marker::StructuralEq for Foo { } | self.x == other.x
}
}
impl Eq for Foo { } | //[no_gate]~^ ERROR use of unstable library feature 'structural_match'
impl PartialEq<Foo> for Foo {
fn eq(&self, other: &Self) -> bool { | random_line_split |
feature-gate.rs | // Test that use of structural-match traits is only permitted with a feature gate,
// and that if a feature gate is supplied, it permits the type to be
// used in a match.
// revisions: with_gate no_gate
// gate-test-structural_match
#![allow(unused)]
#![feature(rustc_attrs)]
#![cfg_attr(with_gate, feature(structural_match))]
struct Foo {
x: u32
}
const FOO: Foo = Foo { x: 0 };
#[rustc_error]
fn main() { //[with_gate]~ ERROR fatal error triggered by #[rustc_error]
let y = Foo { x: 1 };
match y {
FOO => |
_ => { }
}
}
impl std::marker::StructuralPartialEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl std::marker::StructuralEq for Foo { }
//[no_gate]~^ ERROR use of unstable library feature'structural_match'
impl PartialEq<Foo> for Foo {
fn eq(&self, other: &Self) -> bool {
self.x == other.x
}
}
impl Eq for Foo { }
| { } | conditional_block |
issue-2735-3.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test should behave exactly like issue-2735-2
struct defer<'self> {
b: &'self mut bool,
}
#[unsafe_destructor]
impl<'self> Drop for defer<'self> {
fn finalize(&self) {
unsafe {
*(self.b) = true;
}
} |
fn defer<'r>(b: &'r mut bool) -> defer<'r> {
defer {
b: b
}
}
pub fn main() {
let mut dtor_ran = false;
defer(&mut dtor_ran);
assert!((dtor_ran));
} | } | random_line_split |
issue-2735-3.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test should behave exactly like issue-2735-2
struct defer<'self> {
b: &'self mut bool,
}
#[unsafe_destructor]
impl<'self> Drop for defer<'self> {
fn finalize(&self) {
unsafe {
*(self.b) = true;
}
}
}
fn | <'r>(b: &'r mut bool) -> defer<'r> {
defer {
b: b
}
}
pub fn main() {
let mut dtor_ran = false;
defer(&mut dtor_ran);
assert!((dtor_ran));
}
| defer | identifier_name |
issue-2735-3.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test should behave exactly like issue-2735-2
struct defer<'self> {
b: &'self mut bool,
}
#[unsafe_destructor]
impl<'self> Drop for defer<'self> {
fn finalize(&self) |
}
fn defer<'r>(b: &'r mut bool) -> defer<'r> {
defer {
b: b
}
}
pub fn main() {
let mut dtor_ran = false;
defer(&mut dtor_ran);
assert!((dtor_ran));
}
| {
unsafe {
*(self.b) = true;
}
} | identifier_body |
script_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use AnimationState;
use DocumentState;
use IFrameLoadInfo;
use MouseButton;
use MouseEventType;
use MozBrowserEvent;
use canvas_traits::CanvasMsg;
use euclid::point::Point2D;
use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::{Failure, NavigationDirection, PipelineId};
use msg::constellation_msg::{LoadData, SubpageId};
use offscreen_gl_context::GLContextAttributes;
use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use url::Url;
/// Messages from the layout to the constellation.
#[derive(Deserialize, Serialize)]
pub enum LayoutMsg {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Layout thread failure.
Failure(Failure),
/// Requests that the constellation inform the compositor of the a cursor change.
SetCursor(Cursor),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
}
/// Messages from the script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum | {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintThread(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintThread(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
/// Dispatched after the DOM load event has fired on a document
/// Causes a `load` event to be dispatched to any enclosing frame context element
/// for the given pipeline.
DOMLoad(PipelineId),
/// Script thread failure.
Failure(Failure),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Re-send a mouse button event that was sent to the parent window.
ForwardMouseButtonEvent(PipelineId, MouseEventType, MouseButton, Point2D<f32>),
/// Re-send a mouse move event that was sent to the parent window.
ForwardMouseMoveEvent(PipelineId, Point2D<f32>),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// <head> tag finished parsing
HeadParsed,
/// All pending loads are complete.
LoadComplete(PipelineId),
/// A new load has been requested.
LoadUrl(PipelineId, LoadData),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// HTMLIFrameElement Forward or Back navigation.
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
/// Favicon detected
NewFavicon(Url),
/// Status message to be displayed in the chrome, eg. a link URL on mouseover.
NodeStatus(Option<String>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId),
/// A load has been requested in an IFrame.
ScriptLoadedURLInIFrame(IFrameLoadInfo),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Mark a new document as active
ActivateDocument(PipelineId),
/// Set the document state for a pipeline (used by screenshot / reftests)
SetDocumentState(PipelineId, DocumentState),
/// Update the pipeline Url, which can change after redirections.
SetFinalUrl(PipelineId, Url),
}
| ScriptMsg | identifier_name |
script_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use AnimationState;
use DocumentState;
use IFrameLoadInfo;
use MouseButton;
use MouseEventType;
use MozBrowserEvent;
use canvas_traits::CanvasMsg;
use euclid::point::Point2D;
use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::{Failure, NavigationDirection, PipelineId};
use msg::constellation_msg::{LoadData, SubpageId};
use offscreen_gl_context::GLContextAttributes;
use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use url::Url;
/// Messages from the layout to the constellation.
#[derive(Deserialize, Serialize)]
pub enum LayoutMsg {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Layout thread failure.
Failure(Failure),
/// Requests that the constellation inform the compositor of the a cursor change.
SetCursor(Cursor),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
}
/// Messages from the script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum ScriptMsg {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintThread(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintThread(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
/// Dispatched after the DOM load event has fired on a document
/// Causes a `load` event to be dispatched to any enclosing frame context element
/// for the given pipeline.
DOMLoad(PipelineId),
/// Script thread failure.
Failure(Failure),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Re-send a mouse button event that was sent to the parent window.
ForwardMouseButtonEvent(PipelineId, MouseEventType, MouseButton, Point2D<f32>),
/// Re-send a mouse move event that was sent to the parent window.
ForwardMouseMoveEvent(PipelineId, Point2D<f32>),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// <head> tag finished parsing
HeadParsed,
/// All pending loads are complete.
LoadComplete(PipelineId),
/// A new load has been requested.
LoadUrl(PipelineId, LoadData),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// HTMLIFrameElement Forward or Back navigation.
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
/// Favicon detected
NewFavicon(Url),
/// Status message to be displayed in the chrome, eg. a link URL on mouseover.
NodeStatus(Option<String>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId),
/// A load has been requested in an IFrame.
ScriptLoadedURLInIFrame(IFrameLoadInfo), | SetDocumentState(PipelineId, DocumentState),
/// Update the pipeline Url, which can change after redirections.
SetFinalUrl(PipelineId, Url),
} | /// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Mark a new document as active
ActivateDocument(PipelineId),
/// Set the document state for a pipeline (used by screenshot / reftests) | random_line_split |
issue-11267.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
| fn next(&mut self) -> Option<U>;
}
impl T<int> for Empty {
fn next(&mut self) -> Option<int> { None }
}
fn do_something_with(a : &mut T<int>) {
println!("{:?}", a.next())
}
pub fn main() {
do_something_with(&mut Empty);
} | // Tests that unary structs can be mutably borrowed.
struct Empty;
trait T<U> { | random_line_split |
issue-11267.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that unary structs can be mutably borrowed.
struct Empty;
trait T<U> {
fn next(&mut self) -> Option<U>;
}
impl T<int> for Empty {
fn | (&mut self) -> Option<int> { None }
}
fn do_something_with(a : &mut T<int>) {
println!("{:?}", a.next())
}
pub fn main() {
do_something_with(&mut Empty);
}
| next | identifier_name |
misc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use regex::Regex;
mat!(prefix_literal_match, r"^abc", r"abc", Some((0, 3)));
mat!(prefix_literal_nomatch, r"^abc", r"zabc", None);
mat!(one_literal_edge, r"abc", r"xxxxxab", None);
#[test]
fn eq() { | } | assert_eq!(regex!(r"[a-z]+"), Regex::new("[a-z]+").unwrap()); | random_line_split |
misc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use regex::Regex;
mat!(prefix_literal_match, r"^abc", r"abc", Some((0, 3)));
mat!(prefix_literal_nomatch, r"^abc", r"zabc", None);
mat!(one_literal_edge, r"abc", r"xxxxxab", None);
#[test]
fn | () {
assert_eq!(regex!(r"[a-z]+"), Regex::new("[a-z]+").unwrap());
}
| eq | identifier_name |
misc.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use regex::Regex;
mat!(prefix_literal_match, r"^abc", r"abc", Some((0, 3)));
mat!(prefix_literal_nomatch, r"^abc", r"zabc", None);
mat!(one_literal_edge, r"abc", r"xxxxxab", None);
#[test]
fn eq() | {
assert_eq!(regex!(r"[a-z]+"), Regex::new("[a-z]+").unwrap());
} | identifier_body |
|
eseful.rs | // Utility functions/data for EFL Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
/*
* Data and functions to make different tasks easier and more
* convenient (mainly handlers/callbacks).
*/
extern crate libc;
use std::mem::forget;
use std::ffi::CString;
use eseful::libc::c_char;
use eina;
// Empty value handy to use in the 'data' field for callbacks.
pub static Empty: Option<()> = None;
// Callbacks event.
pub struct EventInfo;
pub fn to_c_args(argv: Vec<String>) -> *const *const c_char {
let mut vchars: Vec<*const c_char> = Vec::new();
for s in argv {
vchars.push(CString::new(s).unwrap().as_ptr());
}
let vchars_ptr = vchars.as_ptr();
// Forget the vector of chars so it can be stored statically from C.
forget(vchars);
return vchars_ptr;
}
pub fn from_bool_to_eina(b: bool) -> eina::EinaBool {
match b {
true => eina::EINA_TRUE,
false => eina::EINA_FALSE
}
}
pub fn | (eb: eina::EinaBool) -> bool {
match eb {
eina::EINA_TRUE => true,
_ => false
}
}
| from_eina_to_bool | identifier_name |
eseful.rs | // Utility functions/data for EFL Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
/*
* Data and functions to make different tasks easier and more
* convenient (mainly handlers/callbacks).
*/
extern crate libc;
| use std::mem::forget;
use std::ffi::CString;
use eseful::libc::c_char;
use eina;
// Empty value handy to use in the 'data' field for callbacks.
pub static Empty: Option<()> = None;
// Callbacks event.
pub struct EventInfo;
pub fn to_c_args(argv: Vec<String>) -> *const *const c_char {
let mut vchars: Vec<*const c_char> = Vec::new();
for s in argv {
vchars.push(CString::new(s).unwrap().as_ptr());
}
let vchars_ptr = vchars.as_ptr();
// Forget the vector of chars so it can be stored statically from C.
forget(vchars);
return vchars_ptr;
}
pub fn from_bool_to_eina(b: bool) -> eina::EinaBool {
match b {
true => eina::EINA_TRUE,
false => eina::EINA_FALSE
}
}
pub fn from_eina_to_bool(eb: eina::EinaBool) -> bool {
match eb {
eina::EINA_TRUE => true,
_ => false
}
} | random_line_split |
|
eseful.rs | // Utility functions/data for EFL Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
/*
* Data and functions to make different tasks easier and more
* convenient (mainly handlers/callbacks).
*/
extern crate libc;
use std::mem::forget;
use std::ffi::CString;
use eseful::libc::c_char;
use eina;
// Empty value handy to use in the 'data' field for callbacks.
pub static Empty: Option<()> = None;
// Callbacks event.
pub struct EventInfo;
pub fn to_c_args(argv: Vec<String>) -> *const *const c_char {
let mut vchars: Vec<*const c_char> = Vec::new();
for s in argv {
vchars.push(CString::new(s).unwrap().as_ptr());
}
let vchars_ptr = vchars.as_ptr();
// Forget the vector of chars so it can be stored statically from C.
forget(vchars);
return vchars_ptr;
}
pub fn from_bool_to_eina(b: bool) -> eina::EinaBool {
match b {
true => eina::EINA_TRUE,
false => eina::EINA_FALSE
}
}
pub fn from_eina_to_bool(eb: eina::EinaBool) -> bool | {
match eb {
eina::EINA_TRUE => true,
_ => false
}
} | identifier_body |
|
scheduler.rs | //
// scheduler.rs
// Copyright (C) 2017 Szymon Urbaś <[email protected]>
// Distributed under terms of the BSD (2-clause) license.
//
// Created on: 13 Mar 2017 21:03:41 +0100 (CET)
//
use process::*;
use tui::*; | fn schedule(&mut self);
fn has_processes(&self) -> bool;
fn add_process(&mut self, Process);
fn current_proc(&self) -> Option<&Process>;
fn current_proc_mut(&mut self) -> Option<&mut Process>;
fn kill_current_proc(&mut self);
fn list_processes(&self, &mut Tui);
fn increase_waiting_times(&mut self);
fn context_switch_num(&self) -> usize;
}
/*
* vi: ts=2 sw=2 expandtab
*/ |
pub trait Scheduler {
fn name(&self) -> String; | random_line_split |
htmlheadelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLHeadElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::userscripts::load_script;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct HTMLHeadElement {
htmlelement: HTMLElement
}
impl HTMLHeadElementDerived for EventTarget {
fn is_htmlheadelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadElement)))
}
}
impl HTMLHeadElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLHeadElement {
HTMLHeadElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHeadElement> {
let element = HTMLHeadElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHeadElementBinding::Wrap)
}
}
impl<'a> VirtualMethods for &'a HTMLHeadElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> |
fn bind_to_tree(&self, _tree_in_doc: bool) {
load_script(*self);
}
}
| {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
} | identifier_body |
htmlheadelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLHeadElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::userscripts::load_script;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
| pub struct HTMLHeadElement {
htmlelement: HTMLElement
}
impl HTMLHeadElementDerived for EventTarget {
fn is_htmlheadelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadElement)))
}
}
impl HTMLHeadElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLHeadElement {
HTMLHeadElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHeadElement> {
let element = HTMLHeadElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHeadElementBinding::Wrap)
}
}
impl<'a> VirtualMethods for &'a HTMLHeadElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn bind_to_tree(&self, _tree_in_doc: bool) {
load_script(*self);
}
} | #[dom_struct]
#[derive(HeapSizeOf)] | random_line_split |
htmlheadelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLHeadElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::userscripts::load_script;
use dom::virtualmethods::VirtualMethods;
use util::str::DOMString;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct HTMLHeadElement {
htmlelement: HTMLElement
}
impl HTMLHeadElementDerived for EventTarget {
fn is_htmlheadelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadElement)))
}
}
impl HTMLHeadElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLHeadElement {
HTMLHeadElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn | (localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHeadElement> {
let element = HTMLHeadElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLHeadElementBinding::Wrap)
}
}
impl<'a> VirtualMethods for &'a HTMLHeadElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn bind_to_tree(&self, _tree_in_doc: bool) {
load_script(*self);
}
}
| new | identifier_name |
match-arm-statics.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct NewBool(bool);
enum Direction {
North,
East,
South,
West
}
static TRUE_TRUE: (bool, bool) = (true, true);
fn nonexhaustive_1() {
match (true, false) {
//~^ ERROR non-exhaustive patterns: `(true, false)` not covered
TRUE_TRUE => (),
(false, false) => (),
(false, true) => ()
}
}
fn unreachable_1() {
match (true, false) {
TRUE_TRUE => (),
(false, false) => (),
(false, true) => (),
(true, false) => (),
(true, true) => ()
//~^ ERROR unreachable pattern
}
}
static NONE: Option<Direction> = None;
static EAST: Direction = East;
fn nonexhaustive_2() {
match Some(Some(North)) {
//~^ ERROR non-exhaustive patterns: `Some(Some(West))` not covered
Some(NONE) => (), | None => ()
}
}
fn unreachable_2() {
match Some(Some(North)) {
Some(NONE) => (),
Some(Some(North)) => (),
Some(Some(EAST)) => (),
Some(Some(South)) => (),
Some(Some(West)) => (),
Some(Some(East)) => (),
//~^ ERROR unreachable pattern
None => ()
}
}
static NEW_FALSE: NewBool = NewBool(false);
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
static STATIC_FOO: Foo = Foo { bar: None, baz: NEW_FALSE };
fn nonexhaustive_3() {
match (Foo { bar: Some(North), baz: NewBool(true) }) {
//~^ ERROR non-exhaustive patterns: `Foo { bar: Some(North), baz: NewBool(true) }`
Foo { bar: None, baz: NewBool(true) } => (),
Foo { bar: _, baz: NEW_FALSE } => (),
Foo { bar: Some(West), baz: NewBool(true) } => (),
Foo { bar: Some(South),.. } => (),
Foo { bar: Some(EAST),.. } => ()
}
}
fn unreachable_3() {
match (Foo { bar: Some(EAST), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => (),
Foo { bar: _, baz: NEW_FALSE } => (),
Foo { bar: Some(West), baz: NewBool(true) } => (),
Foo { bar: Some(South),.. } => (),
Foo { bar: Some(EAST),.. } => (),
Foo { bar: Some(North), baz: NewBool(true) } => (),
Foo { bar: Some(EAST), baz: NewBool(false) } => ()
//~^ ERROR unreachable pattern
}
}
fn main() {
nonexhaustive_1();
nonexhaustive_2();
nonexhaustive_3();
unreachable_1();
unreachable_2();
unreachable_3();
} | Some(Some(North)) => (),
Some(Some(EAST)) => (),
Some(Some(South)) => (), | random_line_split |
match-arm-statics.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct NewBool(bool);
enum Direction {
North,
East,
South,
West
}
static TRUE_TRUE: (bool, bool) = (true, true);
fn nonexhaustive_1() {
match (true, false) {
//~^ ERROR non-exhaustive patterns: `(true, false)` not covered
TRUE_TRUE => (),
(false, false) => (),
(false, true) => ()
}
}
fn unreachable_1() {
match (true, false) {
TRUE_TRUE => (),
(false, false) => (),
(false, true) => (),
(true, false) => (),
(true, true) => ()
//~^ ERROR unreachable pattern
}
}
static NONE: Option<Direction> = None;
static EAST: Direction = East;
fn nonexhaustive_2() {
match Some(Some(North)) {
//~^ ERROR non-exhaustive patterns: `Some(Some(West))` not covered
Some(NONE) => (),
Some(Some(North)) => (),
Some(Some(EAST)) => (),
Some(Some(South)) => (),
None => ()
}
}
fn unreachable_2() {
match Some(Some(North)) {
Some(NONE) => (),
Some(Some(North)) => (),
Some(Some(EAST)) => (),
Some(Some(South)) => (),
Some(Some(West)) => (),
Some(Some(East)) => (),
//~^ ERROR unreachable pattern
None => ()
}
}
static NEW_FALSE: NewBool = NewBool(false);
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
static STATIC_FOO: Foo = Foo { bar: None, baz: NEW_FALSE };
fn nonexhaustive_3() {
match (Foo { bar: Some(North), baz: NewBool(true) }) {
//~^ ERROR non-exhaustive patterns: `Foo { bar: Some(North), baz: NewBool(true) }`
Foo { bar: None, baz: NewBool(true) } => (),
Foo { bar: _, baz: NEW_FALSE } => (),
Foo { bar: Some(West), baz: NewBool(true) } => (),
Foo { bar: Some(South),.. } => (),
Foo { bar: Some(EAST),.. } => ()
}
}
fn | () {
match (Foo { bar: Some(EAST), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => (),
Foo { bar: _, baz: NEW_FALSE } => (),
Foo { bar: Some(West), baz: NewBool(true) } => (),
Foo { bar: Some(South),.. } => (),
Foo { bar: Some(EAST),.. } => (),
Foo { bar: Some(North), baz: NewBool(true) } => (),
Foo { bar: Some(EAST), baz: NewBool(false) } => ()
//~^ ERROR unreachable pattern
}
}
fn main() {
nonexhaustive_1();
nonexhaustive_2();
nonexhaustive_3();
unreachable_1();
unreachable_2();
unreachable_3();
}
| unreachable_3 | identifier_name |
output.rs | //! Producing output from Value.
//!
//! This module defines how a Value is serialized as an output of the expression.
//! See also the `json` module.
#![allow(useless_format)]
use std::fmt;
use conv::TryFrom;
use conv::errors::GeneralError;
use rustc_serialize::json::ToJson;
use super::Value;
impl TryFrom<Value> for String {
type Err = GeneralError<&'static str>;
#[inline]
fn | (src: Value) -> Result<Self, Self::Err> {
String::try_from(&src)
}
}
impl<'a> TryFrom<&'a Value> for String {
type Err = <String as TryFrom<Value>>::Err;
/// Try to convert a Value to string that can be emitted
/// as a final result of a computation.
fn try_from(src: &'a Value) -> Result<Self, Self::Err> {
match *src {
Value::Empty => Err(GeneralError::Unrepresentable(
"cannot serialize an empty value"
)),
Value::Symbol(ref t) => Ok(format!("{}", t)),
Value::Boolean(ref b) => Ok(format!("{}", b)),
Value::Integer(ref i) => Ok(format!("{}", i)),
Value::Float(ref f) => {
// always include decimal point and zero, even if the float
// is actually an integer
let mut res = f.to_string();
if!res.contains('.') {
res.push_str(".0");
}
Ok(res)
},
Value::String(ref s) => Ok(s.clone()),
Value::Regex(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a regex"
)),
Value::Array(ref a) => {
// for final display, an array is assumed to contain lines of output
Ok(format!("{}", a.iter()
.map(|v| format!("{}", v)).collect::<Vec<String>>()
.join("\n")))
},
Value::Object(..) => Ok(src.to_json().to_string()),
Value::Function(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a function"
)),
}
}
}
impl fmt::Display for Value {
/// Format a Value for outputing it as a result of the computation.
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
String::try_from(self)
.map(|s| write!(fmt, "{}", s))
// TODO(xion): return an Err(fmt::Error) rather than panicking
// when formatting constructs actually react to it constructively
.expect(&format!("can't display a value of type `{}`", self.typename()))
}
}
| try_from | identifier_name |
output.rs | //! Producing output from Value.
//!
//! This module defines how a Value is serialized as an output of the expression.
//! See also the `json` module.
#![allow(useless_format)]
use std::fmt;
use conv::TryFrom;
use conv::errors::GeneralError;
use rustc_serialize::json::ToJson;
use super::Value;
impl TryFrom<Value> for String {
type Err = GeneralError<&'static str>;
#[inline]
fn try_from(src: Value) -> Result<Self, Self::Err> {
String::try_from(&src)
}
}
impl<'a> TryFrom<&'a Value> for String {
type Err = <String as TryFrom<Value>>::Err;
/// Try to convert a Value to string that can be emitted
/// as a final result of a computation.
fn try_from(src: &'a Value) -> Result<Self, Self::Err> {
match *src {
Value::Empty => Err(GeneralError::Unrepresentable(
"cannot serialize an empty value"
)),
Value::Symbol(ref t) => Ok(format!("{}", t)),
Value::Boolean(ref b) => Ok(format!("{}", b)),
Value::Integer(ref i) => Ok(format!("{}", i)),
Value::Float(ref f) => {
// always include decimal point and zero, even if the float
// is actually an integer
let mut res = f.to_string();
if!res.contains('.') {
res.push_str(".0");
}
Ok(res)
},
Value::String(ref s) => Ok(s.clone()),
Value::Regex(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a regex"
)),
Value::Array(ref a) => {
// for final display, an array is assumed to contain lines of output
Ok(format!("{}", a.iter()
.map(|v| format!("{}", v)).collect::<Vec<String>>()
.join("\n")))
},
Value::Object(..) => Ok(src.to_json().to_string()),
Value::Function(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a function"
)),
}
}
}
impl fmt::Display for Value {
/// Format a Value for outputing it as a result of the computation.
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
String::try_from(self) | // when formatting constructs actually react to it constructively
.expect(&format!("can't display a value of type `{}`", self.typename()))
}
} | .map(|s| write!(fmt, "{}", s))
// TODO(xion): return an Err(fmt::Error) rather than panicking | random_line_split |
output.rs | //! Producing output from Value.
//!
//! This module defines how a Value is serialized as an output of the expression.
//! See also the `json` module.
#![allow(useless_format)]
use std::fmt;
use conv::TryFrom;
use conv::errors::GeneralError;
use rustc_serialize::json::ToJson;
use super::Value;
impl TryFrom<Value> for String {
type Err = GeneralError<&'static str>;
#[inline]
fn try_from(src: Value) -> Result<Self, Self::Err> |
}
impl<'a> TryFrom<&'a Value> for String {
type Err = <String as TryFrom<Value>>::Err;
/// Try to convert a Value to string that can be emitted
/// as a final result of a computation.
fn try_from(src: &'a Value) -> Result<Self, Self::Err> {
match *src {
Value::Empty => Err(GeneralError::Unrepresentable(
"cannot serialize an empty value"
)),
Value::Symbol(ref t) => Ok(format!("{}", t)),
Value::Boolean(ref b) => Ok(format!("{}", b)),
Value::Integer(ref i) => Ok(format!("{}", i)),
Value::Float(ref f) => {
// always include decimal point and zero, even if the float
// is actually an integer
let mut res = f.to_string();
if!res.contains('.') {
res.push_str(".0");
}
Ok(res)
},
Value::String(ref s) => Ok(s.clone()),
Value::Regex(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a regex"
)),
Value::Array(ref a) => {
// for final display, an array is assumed to contain lines of output
Ok(format!("{}", a.iter()
.map(|v| format!("{}", v)).collect::<Vec<String>>()
.join("\n")))
},
Value::Object(..) => Ok(src.to_json().to_string()),
Value::Function(..) => Err(GeneralError::Unrepresentable(
"cannot serialize a function"
)),
}
}
}
impl fmt::Display for Value {
/// Format a Value for outputing it as a result of the computation.
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
String::try_from(self)
.map(|s| write!(fmt, "{}", s))
// TODO(xion): return an Err(fmt::Error) rather than panicking
// when formatting constructs actually react to it constructively
.expect(&format!("can't display a value of type `{}`", self.typename()))
}
}
| {
String::try_from(&src)
} | identifier_body |
variadic-ffi.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern "stdcall" {
fn printf(_: *const u8,...); //~ ERROR: variadic function must have C calling convention
}
extern {
fn foo(f: int, x: u8,...);
}
extern "C" fn bar(f: int, x: u8) {}
fn main() | foo(1, 2, 1u16); //~ ERROR: can't pass u16 to variadic function, cast to c_uint
}
}
| {
unsafe {
foo(); //~ ERROR: this function takes at least 2 parameters but 0 parameters were supplied
foo(1); //~ ERROR: this function takes at least 2 parameters but 1 parameter was supplied
let x: unsafe extern "C" fn(f: int, x: u8) = foo;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8)`
// but found `unsafe extern "C" fn(int, u8, ...)`
// (expected non-variadic fn but found variadic function)
let y: unsafe extern "C" fn(f: int, x: u8, ...) = bar;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8, ...)`
// but found `extern "C" extern fn(int, u8)`
// (expected variadic fn but found non-variadic function)
foo(1, 2, 3f32); //~ ERROR: can't pass an f32 to variadic function, cast to c_double
foo(1, 2, true); //~ ERROR: can't pass bool to variadic function, cast to c_int
foo(1, 2, 1i8); //~ ERROR: can't pass i8 to variadic function, cast to c_int
foo(1, 2, 1u8); //~ ERROR: can't pass u8 to variadic function, cast to c_uint
foo(1, 2, 1i16); //~ ERROR: can't pass i16 to variadic function, cast to c_int | identifier_body |
variadic-ffi.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern "stdcall" {
fn printf(_: *const u8,...); //~ ERROR: variadic function must have C calling convention
}
extern {
fn foo(f: int, x: u8,...);
}
extern "C" fn | (f: int, x: u8) {}
fn main() {
unsafe {
foo(); //~ ERROR: this function takes at least 2 parameters but 0 parameters were supplied
foo(1); //~ ERROR: this function takes at least 2 parameters but 1 parameter was supplied
let x: unsafe extern "C" fn(f: int, x: u8) = foo;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8)`
// but found `unsafe extern "C" fn(int, u8,...)`
// (expected non-variadic fn but found variadic function)
let y: unsafe extern "C" fn(f: int, x: u8,...) = bar;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8,...)`
// but found `extern "C" extern fn(int, u8)`
// (expected variadic fn but found non-variadic function)
foo(1, 2, 3f32); //~ ERROR: can't pass an f32 to variadic function, cast to c_double
foo(1, 2, true); //~ ERROR: can't pass bool to variadic function, cast to c_int
foo(1, 2, 1i8); //~ ERROR: can't pass i8 to variadic function, cast to c_int
foo(1, 2, 1u8); //~ ERROR: can't pass u8 to variadic function, cast to c_uint
foo(1, 2, 1i16); //~ ERROR: can't pass i16 to variadic function, cast to c_int
foo(1, 2, 1u16); //~ ERROR: can't pass u16 to variadic function, cast to c_uint
}
}
| bar | identifier_name |
variadic-ffi.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern "stdcall" {
fn printf(_: *const u8,...); //~ ERROR: variadic function must have C calling convention
}
extern {
fn foo(f: int, x: u8,...);
}
extern "C" fn bar(f: int, x: u8) {}
fn main() {
unsafe {
foo(); //~ ERROR: this function takes at least 2 parameters but 0 parameters were supplied
foo(1); //~ ERROR: this function takes at least 2 parameters but 1 parameter was supplied
| let y: unsafe extern "C" fn(f: int, x: u8,...) = bar;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8,...)`
// but found `extern "C" extern fn(int, u8)`
// (expected variadic fn but found non-variadic function)
foo(1, 2, 3f32); //~ ERROR: can't pass an f32 to variadic function, cast to c_double
foo(1, 2, true); //~ ERROR: can't pass bool to variadic function, cast to c_int
foo(1, 2, 1i8); //~ ERROR: can't pass i8 to variadic function, cast to c_int
foo(1, 2, 1u8); //~ ERROR: can't pass u8 to variadic function, cast to c_uint
foo(1, 2, 1i16); //~ ERROR: can't pass i16 to variadic function, cast to c_int
foo(1, 2, 1u16); //~ ERROR: can't pass u16 to variadic function, cast to c_uint
}
} | let x: unsafe extern "C" fn(f: int, x: u8) = foo;
//~^ ERROR: mismatched types: expected `unsafe extern "C" fn(int, u8)`
// but found `unsafe extern "C" fn(int, u8, ...)`
// (expected non-variadic fn but found variadic function)
| random_line_split |
tests.rs | use super::super::navigate;
use super::*;
use crate::fmt::Debug;
use crate::string::String;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) {
if let ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
let child = edge.descend();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
}
}
// Renders a multi-line display of the keys in order and in tree hierarchy,
// picturing the tree growing sideways from its root on the left to its
// leaves on the right.
pub fn dump_keys(self) -> String
where
K: Debug,
{
let mut result = String::new();
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(leaf) => {
let depth = self.height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, leaf.keys());
}
navigate::Position::Internal(_) => {}
navigate::Position::InternalKV(kv) => {
let depth = self.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
}
}
#[test]
fn test_splitpoint() | }
}
#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf();
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
let root2 = Root::new();
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();
let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
let top_edge_1 = root1.reborrow().first_edge();
let top_edge_2 = root2.reborrow().first_edge();
assert!(leaf_edge_1a == leaf_edge_1a);
assert!(leaf_edge_1a!= leaf_edge_1b);
assert!(leaf_edge_1a!= top_edge_1);
assert!(leaf_edge_1a!= top_edge_2);
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1!= top_edge_2);
root1.pop_internal_level();
unsafe { root1.into_dying().deallocate_and_ascend() };
unsafe { root2.into_dying().deallocate_and_ascend() };
}
#[test]
#[cfg(target_arch = "x86_64")]
fn test_sizes() {
assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
}
| {
for idx in 0..=CAPACITY {
let (middle_kv_idx, insertion) = splitpoint(idx);
// Simulate performing the split:
let mut left_len = middle_kv_idx;
let mut right_len = CAPACITY - middle_kv_idx - 1;
match insertion {
LeftOrRight::Left(edge_idx) => {
assert!(edge_idx <= left_len);
left_len += 1;
}
LeftOrRight::Right(edge_idx) => {
assert!(edge_idx <= right_len);
right_len += 1;
}
}
assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY); | identifier_body |
tests.rs | use super::super::navigate;
use super::*;
use crate::fmt::Debug;
use crate::string::String;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) {
if let ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
let child = edge.descend();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
}
}
// Renders a multi-line display of the keys in order and in tree hierarchy,
// picturing the tree growing sideways from its root on the left to its
// leaves on the right.
pub fn dump_keys(self) -> String
where
K: Debug,
{
let mut result = String::new();
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(leaf) => |
navigate::Position::Internal(_) => {}
navigate::Position::InternalKV(kv) => {
let depth = self.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
}
}
#[test]
fn test_splitpoint() {
for idx in 0..=CAPACITY {
let (middle_kv_idx, insertion) = splitpoint(idx);
// Simulate performing the split:
let mut left_len = middle_kv_idx;
let mut right_len = CAPACITY - middle_kv_idx - 1;
match insertion {
LeftOrRight::Left(edge_idx) => {
assert!(edge_idx <= left_len);
left_len += 1;
}
LeftOrRight::Right(edge_idx) => {
assert!(edge_idx <= right_len);
right_len += 1;
}
}
assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY);
}
}
#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf();
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
let root2 = Root::new();
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();
let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
let top_edge_1 = root1.reborrow().first_edge();
let top_edge_2 = root2.reborrow().first_edge();
assert!(leaf_edge_1a == leaf_edge_1a);
assert!(leaf_edge_1a!= leaf_edge_1b);
assert!(leaf_edge_1a!= top_edge_1);
assert!(leaf_edge_1a!= top_edge_2);
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1!= top_edge_2);
root1.pop_internal_level();
unsafe { root1.into_dying().deallocate_and_ascend() };
unsafe { root2.into_dying().deallocate_and_ascend() };
}
#[test]
#[cfg(target_arch = "x86_64")]
fn test_sizes() {
assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
}
| {
let depth = self.height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, leaf.keys());
} | conditional_block |
tests.rs | use super::super::navigate;
use super::*;
use crate::fmt::Debug;
use crate::string::String;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) {
if let ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
let child = edge.descend();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
}
}
// Renders a multi-line display of the keys in order and in tree hierarchy,
// picturing the tree growing sideways from its root on the left to its
// leaves on the right.
pub fn dump_keys(self) -> String
where
K: Debug,
{
let mut result = String::new();
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(leaf) => {
let depth = self.height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, leaf.keys());
}
navigate::Position::Internal(_) => {}
navigate::Position::InternalKV(kv) => {
let depth = self.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
}
}
#[test]
fn test_splitpoint() {
for idx in 0..=CAPACITY {
let (middle_kv_idx, insertion) = splitpoint(idx);
// Simulate performing the split:
let mut left_len = middle_kv_idx;
let mut right_len = CAPACITY - middle_kv_idx - 1;
match insertion {
LeftOrRight::Left(edge_idx) => {
assert!(edge_idx <= left_len);
left_len += 1;
}
LeftOrRight::Right(edge_idx) => {
assert!(edge_idx <= right_len);
right_len += 1;
}
}
assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY);
}
}
#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf();
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
let root2 = Root::new();
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();
let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
let top_edge_1 = root1.reborrow().first_edge();
let top_edge_2 = root2.reborrow().first_edge();
assert!(leaf_edge_1a == leaf_edge_1a);
assert!(leaf_edge_1a!= leaf_edge_1b);
assert!(leaf_edge_1a!= top_edge_1);
assert!(leaf_edge_1a!= top_edge_2);
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1!= top_edge_2);
root1.pop_internal_level();
unsafe { root1.into_dying().deallocate_and_ascend() };
unsafe { root2.into_dying().deallocate_and_ascend() };
}
#[test]
#[cfg(target_arch = "x86_64")]
fn | () {
assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
}
| test_sizes | identifier_name |
tests.rs | use super::super::navigate;
use super::*;
use crate::fmt::Debug;
use crate::string::String;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) {
if let ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
let child = edge.descend();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
} | }
// Renders a multi-line display of the keys in order and in tree hierarchy,
// picturing the tree growing sideways from its root on the left to its
// leaves on the right.
pub fn dump_keys(self) -> String
where
K: Debug,
{
let mut result = String::new();
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(leaf) => {
let depth = self.height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, leaf.keys());
}
navigate::Position::Internal(_) => {}
navigate::Position::InternalKV(kv) => {
let depth = self.height() - kv.into_node().height();
let indent = " ".repeat(depth);
result += &format!("\n{}{:?}", indent, kv.into_kv().0);
}
});
result
}
}
#[test]
fn test_splitpoint() {
for idx in 0..=CAPACITY {
let (middle_kv_idx, insertion) = splitpoint(idx);
// Simulate performing the split:
let mut left_len = middle_kv_idx;
let mut right_len = CAPACITY - middle_kv_idx - 1;
match insertion {
LeftOrRight::Left(edge_idx) => {
assert!(edge_idx <= left_len);
left_len += 1;
}
LeftOrRight::Right(edge_idx) => {
assert!(edge_idx <= right_len);
right_len += 1;
}
}
assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY);
}
}
#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf();
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type()).forget_type();
let root2 = Root::new();
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();
let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
let top_edge_1 = root1.reborrow().first_edge();
let top_edge_2 = root2.reborrow().first_edge();
assert!(leaf_edge_1a == leaf_edge_1a);
assert!(leaf_edge_1a!= leaf_edge_1b);
assert!(leaf_edge_1a!= top_edge_1);
assert!(leaf_edge_1a!= top_edge_2);
assert!(top_edge_1 == top_edge_1);
assert!(top_edge_1!= top_edge_2);
root1.pop_internal_level();
unsafe { root1.into_dying().deallocate_and_ascend() };
unsafe { root2.into_dying().deallocate_and_ascend() };
}
#[test]
#[cfg(target_arch = "x86_64")]
fn test_sizes() {
assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
} | random_line_split |
|
primitive.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::lexer::TokenKind;
use common::{SourceLocationKey, Span, WithLocation};
use interner::StringKey;
use std::cmp::Ordering;
use std::fmt;
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Token {
pub span: Span,
pub kind: TokenKind,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Identifier {
pub span: Span,
pub token: Token,
pub value: StringKey,
}
impl fmt::Display for Identifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!("{}", self.value))
}
}
impl Ord for Identifier {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
impl PartialOrd for Identifier {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Identifier {
pub fn name_with_location(&self, file: SourceLocationKey) -> WithLocation<StringKey> {
WithLocation::from_span(file, self.span, self.value)
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct List<T> {
pub span: Span, | impl<T> List<T> {
pub fn generated(items: Vec<T>) -> Self {
Self {
span: Span::empty(),
start: Token {
span: Span::empty(),
kind: TokenKind::OpenBrace,
},
items,
end: Token {
span: Span::empty(),
kind: TokenKind::CloseBrace,
},
}
}
} | pub start: Token,
pub items: Vec<T>,
pub end: Token,
}
| random_line_split |
primitive.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::lexer::TokenKind;
use common::{SourceLocationKey, Span, WithLocation};
use interner::StringKey;
use std::cmp::Ordering;
use std::fmt;
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Token {
pub span: Span,
pub kind: TokenKind,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Identifier {
pub span: Span,
pub token: Token,
pub value: StringKey,
}
impl fmt::Display for Identifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!("{}", self.value))
}
}
impl Ord for Identifier {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
impl PartialOrd for Identifier {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Identifier {
pub fn name_with_location(&self, file: SourceLocationKey) -> WithLocation<StringKey> {
WithLocation::from_span(file, self.span, self.value)
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct List<T> {
pub span: Span,
pub start: Token,
pub items: Vec<T>,
pub end: Token,
}
impl<T> List<T> {
pub fn generated(items: Vec<T>) -> Self |
}
| {
Self {
span: Span::empty(),
start: Token {
span: Span::empty(),
kind: TokenKind::OpenBrace,
},
items,
end: Token {
span: Span::empty(),
kind: TokenKind::CloseBrace,
},
}
} | identifier_body |
primitive.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::lexer::TokenKind;
use common::{SourceLocationKey, Span, WithLocation};
use interner::StringKey;
use std::cmp::Ordering;
use std::fmt;
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Token {
pub span: Span,
pub kind: TokenKind,
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Identifier {
pub span: Span,
pub token: Token,
pub value: StringKey,
}
impl fmt::Display for Identifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!("{}", self.value))
}
}
impl Ord for Identifier {
fn cmp(&self, other: &Self) -> Ordering {
self.value.cmp(&other.value)
}
}
impl PartialOrd for Identifier {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Identifier {
pub fn name_with_location(&self, file: SourceLocationKey) -> WithLocation<StringKey> {
WithLocation::from_span(file, self.span, self.value)
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct | <T> {
pub span: Span,
pub start: Token,
pub items: Vec<T>,
pub end: Token,
}
impl<T> List<T> {
pub fn generated(items: Vec<T>) -> Self {
Self {
span: Span::empty(),
start: Token {
span: Span::empty(),
kind: TokenKind::OpenBrace,
},
items,
end: Token {
span: Span::empty(),
kind: TokenKind::CloseBrace,
},
}
}
}
| List | identifier_name |
syslog.rs | // Copyright 2015 click2stream, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Syslog definitions.
use std::ptr;
use std::ffi::CString;
use std::fmt::Arguments;
use std::os::raw::{c_char, c_int, c_void};
use std::sync::Once;
use crate::utils::logger::{Logger, Severity};
const LOG_PID: c_int = 0x01;
const LOG_CONS: c_int = 0x02;
const LOG_USER: c_int = 0x08;
const LOG_ERR: c_int = 3;
const LOG_WARNING: c_int = 4;
const LOG_INFO: c_int = 6;
const LOG_DEBUG: c_int = 7;
static SYSLOG_INIT: Once = Once::new();
#[link(name = "c")]
extern "C" {
fn openlog(ident: *const c_char, option: c_int, facility: c_int) -> c_void;
fn syslog(priority: c_int, format: *const c_char,...) -> c_void;
}
/// Syslog logger structure.
#[derive(Debug, Clone)]
pub struct Syslog {
level: Severity,
}
impl Syslog {
/// Create a new syslog logger with log level set to INFO.
pub fn new() -> Self {
Self::default()
}
}
impl Default for Syslog {
fn default() -> Self {
SYSLOG_INIT.call_once(|| unsafe {
openlog(ptr::null(), LOG_CONS | LOG_PID, LOG_USER);
});
Self {
level: Severity::INFO,
}
}
}
impl Logger for Syslog {
fn log(&mut self, file: &str, line: u32, s: Severity, msg: Arguments) {
let msg = format!("[{}:{}] {}", file, line, msg);
let cstr_fmt = CString::new("%s").unwrap();
let cstr_msg = CString::new(msg).unwrap();
let fmt_ptr = cstr_fmt.as_ptr() as *const c_char;
let msg_ptr = cstr_msg.as_ptr() as *const c_char;
if s >= self.level {
unsafe {
match s {
Severity::DEBUG => syslog(LOG_DEBUG, fmt_ptr, msg_ptr),
Severity::INFO => syslog(LOG_INFO, fmt_ptr, msg_ptr),
Severity::WARN => syslog(LOG_WARNING, fmt_ptr, msg_ptr),
Severity::ERROR => syslog(LOG_ERR, fmt_ptr, msg_ptr),
}
};
}
}
fn set_level(&mut self, s: Severity) { | fn get_level(&self) -> Severity {
self.level
}
} | self.level = s;
}
| random_line_split |
syslog.rs | // Copyright 2015 click2stream, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Syslog definitions.
use std::ptr;
use std::ffi::CString;
use std::fmt::Arguments;
use std::os::raw::{c_char, c_int, c_void};
use std::sync::Once;
use crate::utils::logger::{Logger, Severity};
const LOG_PID: c_int = 0x01;
const LOG_CONS: c_int = 0x02;
const LOG_USER: c_int = 0x08;
const LOG_ERR: c_int = 3;
const LOG_WARNING: c_int = 4;
const LOG_INFO: c_int = 6;
const LOG_DEBUG: c_int = 7;
static SYSLOG_INIT: Once = Once::new();
#[link(name = "c")]
extern "C" {
fn openlog(ident: *const c_char, option: c_int, facility: c_int) -> c_void;
fn syslog(priority: c_int, format: *const c_char,...) -> c_void;
}
/// Syslog logger structure.
#[derive(Debug, Clone)]
pub struct Syslog {
level: Severity,
}
impl Syslog {
/// Create a new syslog logger with log level set to INFO.
pub fn new() -> Self {
Self::default()
}
}
impl Default for Syslog {
fn default() -> Self |
}
impl Logger for Syslog {
fn log(&mut self, file: &str, line: u32, s: Severity, msg: Arguments) {
let msg = format!("[{}:{}] {}", file, line, msg);
let cstr_fmt = CString::new("%s").unwrap();
let cstr_msg = CString::new(msg).unwrap();
let fmt_ptr = cstr_fmt.as_ptr() as *const c_char;
let msg_ptr = cstr_msg.as_ptr() as *const c_char;
if s >= self.level {
unsafe {
match s {
Severity::DEBUG => syslog(LOG_DEBUG, fmt_ptr, msg_ptr),
Severity::INFO => syslog(LOG_INFO, fmt_ptr, msg_ptr),
Severity::WARN => syslog(LOG_WARNING, fmt_ptr, msg_ptr),
Severity::ERROR => syslog(LOG_ERR, fmt_ptr, msg_ptr),
}
};
}
}
fn set_level(&mut self, s: Severity) {
self.level = s;
}
fn get_level(&self) -> Severity {
self.level
}
}
| {
SYSLOG_INIT.call_once(|| unsafe {
openlog(ptr::null(), LOG_CONS | LOG_PID, LOG_USER);
});
Self {
level: Severity::INFO,
}
} | identifier_body |
syslog.rs | // Copyright 2015 click2stream, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Syslog definitions.
use std::ptr;
use std::ffi::CString;
use std::fmt::Arguments;
use std::os::raw::{c_char, c_int, c_void};
use std::sync::Once;
use crate::utils::logger::{Logger, Severity};
const LOG_PID: c_int = 0x01;
const LOG_CONS: c_int = 0x02;
const LOG_USER: c_int = 0x08;
const LOG_ERR: c_int = 3;
const LOG_WARNING: c_int = 4;
const LOG_INFO: c_int = 6;
const LOG_DEBUG: c_int = 7;
static SYSLOG_INIT: Once = Once::new();
#[link(name = "c")]
extern "C" {
fn openlog(ident: *const c_char, option: c_int, facility: c_int) -> c_void;
fn syslog(priority: c_int, format: *const c_char,...) -> c_void;
}
/// Syslog logger structure.
#[derive(Debug, Clone)]
pub struct Syslog {
level: Severity,
}
impl Syslog {
/// Create a new syslog logger with log level set to INFO.
pub fn | () -> Self {
Self::default()
}
}
impl Default for Syslog {
fn default() -> Self {
SYSLOG_INIT.call_once(|| unsafe {
openlog(ptr::null(), LOG_CONS | LOG_PID, LOG_USER);
});
Self {
level: Severity::INFO,
}
}
}
impl Logger for Syslog {
fn log(&mut self, file: &str, line: u32, s: Severity, msg: Arguments) {
let msg = format!("[{}:{}] {}", file, line, msg);
let cstr_fmt = CString::new("%s").unwrap();
let cstr_msg = CString::new(msg).unwrap();
let fmt_ptr = cstr_fmt.as_ptr() as *const c_char;
let msg_ptr = cstr_msg.as_ptr() as *const c_char;
if s >= self.level {
unsafe {
match s {
Severity::DEBUG => syslog(LOG_DEBUG, fmt_ptr, msg_ptr),
Severity::INFO => syslog(LOG_INFO, fmt_ptr, msg_ptr),
Severity::WARN => syslog(LOG_WARNING, fmt_ptr, msg_ptr),
Severity::ERROR => syslog(LOG_ERR, fmt_ptr, msg_ptr),
}
};
}
}
fn set_level(&mut self, s: Severity) {
self.level = s;
}
fn get_level(&self) -> Severity {
self.level
}
}
| new | identifier_name |
aesdec.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn aesdec_1() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 230], OperandSize::Dword)
} |
fn aesdec_3() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 232], OperandSize::Qword)
}
fn aesdec_4() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RSI, Eight, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 242], OperandSize::Qword)
} |
fn aesdec_2() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledDisplaced(ECX, Eight, 454573889, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 205, 65, 63, 24, 27], OperandSize::Dword)
} | random_line_split |
aesdec.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn aesdec_1() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 230], OperandSize::Dword)
}
fn aesdec_2() |
fn aesdec_3() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 232], OperandSize::Qword)
}
fn aesdec_4() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RSI, Eight, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 242], OperandSize::Qword)
}
| {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledDisplaced(ECX, Eight, 454573889, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 205, 65, 63, 24, 27], OperandSize::Dword)
} | identifier_body |
aesdec.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn aesdec_1() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 230], OperandSize::Dword)
}
fn aesdec_2() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledDisplaced(ECX, Eight, 454573889, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 205, 65, 63, 24, 27], OperandSize::Dword)
}
fn | () {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 232], OperandSize::Qword)
}
fn aesdec_4() {
run_test(&Instruction { mnemonic: Mnemonic::AESDEC, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RSI, Eight, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 56, 222, 28, 242], OperandSize::Qword)
}
| aesdec_3 | identifier_name |
lint-ctypes.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(improper_ctypes)]
#![feature(rustc_private)]
#![allow(private_in_public)]
extern crate libc;
use std::marker::PhantomData;
trait Mirror { type It:?Sized; }
impl<T:?Sized> Mirror for T { type It = Self; }
#[repr(C)]
pub struct StructWithProjection(*mut <StructWithProjection as Mirror>::It);
#[repr(C)]
pub struct StructWithProjectionAndLifetime<'a>(
&'a mut <StructWithProjectionAndLifetime<'a> as Mirror>::It
);
pub type I32Pair = (i32, i32);
#[repr(C)]
pub struct ZeroSize;
pub type RustFn = fn();
pub type RustBadRet = extern fn() -> Box<u32>;
pub type CVoidRet = ();
pub struct Foo;
#[repr(transparent)]
pub struct TransparentI128(i128);
#[repr(transparent)]
pub struct TransparentStr(&'static str);
#[repr(transparent)]
pub struct TransparentBadFn(RustBadRet);
#[repr(transparent)]
pub struct TransparentInt(u32);
#[repr(transparent)]
pub struct TransparentRef<'a>(&'a TransparentInt);
#[repr(transparent)]
pub struct TransparentLifetime<'a>(*const u8, PhantomData<&'a ()>);
#[repr(transparent)]
pub struct TransparentUnit<U>(f32, PhantomData<U>);
#[repr(transparent)]
pub struct TransparentCustomZst(i32, ZeroSize);
#[repr(C)]
pub struct ZeroSizeWithPhantomData(::std::marker::PhantomData<i32>);
extern {
pub fn ptr_type1(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn ptr_type2(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn slice_type(p: &[u32]); //~ ERROR: uses type `[u32]`
pub fn str_type(p: &str); //~ ERROR: uses type `str`
pub fn box_type(p: Box<u32>); //~ ERROR uses type `std::boxed::Box<u32>`
pub fn char_type(p: char); //~ ERROR uses type `char`
pub fn i128_type(p: i128); //~ ERROR uses type `i128`
pub fn u128_type(p: u128); //~ ERROR uses type `u128`
pub fn trait_type(p: &Clone); //~ ERROR uses type `dyn std::clone::Clone`
pub fn tuple_type(p: (i32, i32)); //~ ERROR uses type `(i32, i32)`
pub fn tuple_type2(p: I32Pair); //~ ERROR uses type `(i32, i32)`
pub fn zero_size(p: ZeroSize); //~ ERROR struct has no fields
pub fn zero_size_phantom(p: ZeroSizeWithPhantomData); //~ ERROR composed only of PhantomData
pub fn zero_size_phantom_toplevel()
-> ::std::marker::PhantomData<bool>; //~ ERROR: composed only of PhantomData
pub fn fn_type(p: RustFn); //~ ERROR function pointer has Rust-specific
pub fn fn_type2(p: fn()); //~ ERROR function pointer has Rust-specific
pub fn fn_contained(p: RustBadRet); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn transparent_i128(p: TransparentI128); //~ ERROR: uses type `i128`
pub fn transparent_str(p: TransparentStr); //~ ERROR: uses type `str`
pub fn transparent_fn(p: TransparentBadFn); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn good3(fptr: Option<extern fn()>);
pub fn good4(aptr: &[u8; 4 as usize]);
pub fn good5(s: StructWithProjection);
pub fn good6(s: StructWithProjectionAndLifetime);
pub fn good7(fptr: extern fn() -> ());
pub fn good8(fptr: extern fn() ->!);
pub fn good9() -> ();
pub fn good10() -> CVoidRet;
pub fn good11(size: isize);
pub fn good12(size: usize);
pub fn good13(n: TransparentInt);
pub fn good14(p: TransparentRef);
pub fn good15(p: TransparentLifetime);
pub fn good16(p: TransparentUnit<ZeroSize>);
pub fn good17(p: TransparentCustomZst);
#[allow(improper_ctypes)]
pub fn good18(_: &String);
}
#[allow(improper_ctypes)]
extern {
pub fn good19(_: &String);
}
#[cfg(not(target_arch = "wasm32"))]
extern {
pub fn good1(size: *const libc::c_int);
pub fn good2(size: *const libc::c_uint);
}
fn main() | {
} | identifier_body |
|
lint-ctypes.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(improper_ctypes)]
#![feature(rustc_private)]
#![allow(private_in_public)]
extern crate libc;
use std::marker::PhantomData;
trait Mirror { type It:?Sized; }
impl<T:?Sized> Mirror for T { type It = Self; }
#[repr(C)]
pub struct StructWithProjection(*mut <StructWithProjection as Mirror>::It);
#[repr(C)]
pub struct StructWithProjectionAndLifetime<'a>(
&'a mut <StructWithProjectionAndLifetime<'a> as Mirror>::It
);
pub type I32Pair = (i32, i32);
#[repr(C)]
pub struct ZeroSize;
pub type RustFn = fn(); | #[repr(transparent)]
pub struct TransparentI128(i128);
#[repr(transparent)]
pub struct TransparentStr(&'static str);
#[repr(transparent)]
pub struct TransparentBadFn(RustBadRet);
#[repr(transparent)]
pub struct TransparentInt(u32);
#[repr(transparent)]
pub struct TransparentRef<'a>(&'a TransparentInt);
#[repr(transparent)]
pub struct TransparentLifetime<'a>(*const u8, PhantomData<&'a ()>);
#[repr(transparent)]
pub struct TransparentUnit<U>(f32, PhantomData<U>);
#[repr(transparent)]
pub struct TransparentCustomZst(i32, ZeroSize);
#[repr(C)]
pub struct ZeroSizeWithPhantomData(::std::marker::PhantomData<i32>);
extern {
pub fn ptr_type1(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn ptr_type2(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn slice_type(p: &[u32]); //~ ERROR: uses type `[u32]`
pub fn str_type(p: &str); //~ ERROR: uses type `str`
pub fn box_type(p: Box<u32>); //~ ERROR uses type `std::boxed::Box<u32>`
pub fn char_type(p: char); //~ ERROR uses type `char`
pub fn i128_type(p: i128); //~ ERROR uses type `i128`
pub fn u128_type(p: u128); //~ ERROR uses type `u128`
pub fn trait_type(p: &Clone); //~ ERROR uses type `dyn std::clone::Clone`
pub fn tuple_type(p: (i32, i32)); //~ ERROR uses type `(i32, i32)`
pub fn tuple_type2(p: I32Pair); //~ ERROR uses type `(i32, i32)`
pub fn zero_size(p: ZeroSize); //~ ERROR struct has no fields
pub fn zero_size_phantom(p: ZeroSizeWithPhantomData); //~ ERROR composed only of PhantomData
pub fn zero_size_phantom_toplevel()
-> ::std::marker::PhantomData<bool>; //~ ERROR: composed only of PhantomData
pub fn fn_type(p: RustFn); //~ ERROR function pointer has Rust-specific
pub fn fn_type2(p: fn()); //~ ERROR function pointer has Rust-specific
pub fn fn_contained(p: RustBadRet); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn transparent_i128(p: TransparentI128); //~ ERROR: uses type `i128`
pub fn transparent_str(p: TransparentStr); //~ ERROR: uses type `str`
pub fn transparent_fn(p: TransparentBadFn); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn good3(fptr: Option<extern fn()>);
pub fn good4(aptr: &[u8; 4 as usize]);
pub fn good5(s: StructWithProjection);
pub fn good6(s: StructWithProjectionAndLifetime);
pub fn good7(fptr: extern fn() -> ());
pub fn good8(fptr: extern fn() ->!);
pub fn good9() -> ();
pub fn good10() -> CVoidRet;
pub fn good11(size: isize);
pub fn good12(size: usize);
pub fn good13(n: TransparentInt);
pub fn good14(p: TransparentRef);
pub fn good15(p: TransparentLifetime);
pub fn good16(p: TransparentUnit<ZeroSize>);
pub fn good17(p: TransparentCustomZst);
#[allow(improper_ctypes)]
pub fn good18(_: &String);
}
#[allow(improper_ctypes)]
extern {
pub fn good19(_: &String);
}
#[cfg(not(target_arch = "wasm32"))]
extern {
pub fn good1(size: *const libc::c_int);
pub fn good2(size: *const libc::c_uint);
}
fn main() {
} | pub type RustBadRet = extern fn() -> Box<u32>;
pub type CVoidRet = ();
pub struct Foo; | random_line_split |
lint-ctypes.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(improper_ctypes)]
#![feature(rustc_private)]
#![allow(private_in_public)]
extern crate libc;
use std::marker::PhantomData;
trait Mirror { type It:?Sized; }
impl<T:?Sized> Mirror for T { type It = Self; }
#[repr(C)]
pub struct StructWithProjection(*mut <StructWithProjection as Mirror>::It);
#[repr(C)]
pub struct StructWithProjectionAndLifetime<'a>(
&'a mut <StructWithProjectionAndLifetime<'a> as Mirror>::It
);
pub type I32Pair = (i32, i32);
#[repr(C)]
pub struct ZeroSize;
pub type RustFn = fn();
pub type RustBadRet = extern fn() -> Box<u32>;
pub type CVoidRet = ();
pub struct Foo;
#[repr(transparent)]
pub struct TransparentI128(i128);
#[repr(transparent)]
pub struct TransparentStr(&'static str);
#[repr(transparent)]
pub struct TransparentBadFn(RustBadRet);
#[repr(transparent)]
pub struct TransparentInt(u32);
#[repr(transparent)]
pub struct TransparentRef<'a>(&'a TransparentInt);
#[repr(transparent)]
pub struct TransparentLifetime<'a>(*const u8, PhantomData<&'a ()>);
#[repr(transparent)]
pub struct | <U>(f32, PhantomData<U>);
#[repr(transparent)]
pub struct TransparentCustomZst(i32, ZeroSize);
#[repr(C)]
pub struct ZeroSizeWithPhantomData(::std::marker::PhantomData<i32>);
extern {
pub fn ptr_type1(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn ptr_type2(size: *const Foo); //~ ERROR: uses type `Foo`
pub fn slice_type(p: &[u32]); //~ ERROR: uses type `[u32]`
pub fn str_type(p: &str); //~ ERROR: uses type `str`
pub fn box_type(p: Box<u32>); //~ ERROR uses type `std::boxed::Box<u32>`
pub fn char_type(p: char); //~ ERROR uses type `char`
pub fn i128_type(p: i128); //~ ERROR uses type `i128`
pub fn u128_type(p: u128); //~ ERROR uses type `u128`
pub fn trait_type(p: &Clone); //~ ERROR uses type `dyn std::clone::Clone`
pub fn tuple_type(p: (i32, i32)); //~ ERROR uses type `(i32, i32)`
pub fn tuple_type2(p: I32Pair); //~ ERROR uses type `(i32, i32)`
pub fn zero_size(p: ZeroSize); //~ ERROR struct has no fields
pub fn zero_size_phantom(p: ZeroSizeWithPhantomData); //~ ERROR composed only of PhantomData
pub fn zero_size_phantom_toplevel()
-> ::std::marker::PhantomData<bool>; //~ ERROR: composed only of PhantomData
pub fn fn_type(p: RustFn); //~ ERROR function pointer has Rust-specific
pub fn fn_type2(p: fn()); //~ ERROR function pointer has Rust-specific
pub fn fn_contained(p: RustBadRet); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn transparent_i128(p: TransparentI128); //~ ERROR: uses type `i128`
pub fn transparent_str(p: TransparentStr); //~ ERROR: uses type `str`
pub fn transparent_fn(p: TransparentBadFn); //~ ERROR: uses type `std::boxed::Box<u32>`
pub fn good3(fptr: Option<extern fn()>);
pub fn good4(aptr: &[u8; 4 as usize]);
pub fn good5(s: StructWithProjection);
pub fn good6(s: StructWithProjectionAndLifetime);
pub fn good7(fptr: extern fn() -> ());
pub fn good8(fptr: extern fn() ->!);
pub fn good9() -> ();
pub fn good10() -> CVoidRet;
pub fn good11(size: isize);
pub fn good12(size: usize);
pub fn good13(n: TransparentInt);
pub fn good14(p: TransparentRef);
pub fn good15(p: TransparentLifetime);
pub fn good16(p: TransparentUnit<ZeroSize>);
pub fn good17(p: TransparentCustomZst);
#[allow(improper_ctypes)]
pub fn good18(_: &String);
}
#[allow(improper_ctypes)]
extern {
pub fn good19(_: &String);
}
#[cfg(not(target_arch = "wasm32"))]
extern {
pub fn good1(size: *const libc::c_int);
pub fn good2(size: *const libc::c_uint);
}
fn main() {
}
| TransparentUnit | identifier_name |
c_like.rs | // https://rustbyexample.com/custom_types/enum/c_like.html
// http://rust-lang-ja.org/rust-by-example/custom_types/enum/c_like.html
// An attribute to hide warnings for unused code.
#![allow(dead_code)]
// enum with implicit discriminator (starts at 0)
enum Number {
Zero,
One,
Two,
}
// enum with explicit discriminator
enum Color {
Red = 0xff0000,
Green = 0x00ff00,
Blue = 0x0000ff,
}
fn main() { | println!("roses are #{:06x}", Color::Red as i32);
println!("violets are #{:06x}", Color::Blue as i32);
} | // `enums` can be cast as integers.
println!("zero is {}", Number::Zero as i32);
println!("one is {}", Number::One as i32);
| random_line_split |
c_like.rs | // https://rustbyexample.com/custom_types/enum/c_like.html
// http://rust-lang-ja.org/rust-by-example/custom_types/enum/c_like.html
// An attribute to hide warnings for unused code.
#![allow(dead_code)]
// enum with implicit discriminator (starts at 0)
enum Number {
Zero,
One,
Two,
}
// enum with explicit discriminator
enum | {
Red = 0xff0000,
Green = 0x00ff00,
Blue = 0x0000ff,
}
fn main() {
// `enums` can be cast as integers.
println!("zero is {}", Number::Zero as i32);
println!("one is {}", Number::One as i32);
println!("roses are #{:06x}", Color::Red as i32);
println!("violets are #{:06x}", Color::Blue as i32);
}
| Color | identifier_name |
c_like.rs | // https://rustbyexample.com/custom_types/enum/c_like.html
// http://rust-lang-ja.org/rust-by-example/custom_types/enum/c_like.html
// An attribute to hide warnings for unused code.
#![allow(dead_code)]
// enum with implicit discriminator (starts at 0)
enum Number {
Zero,
One,
Two,
}
// enum with explicit discriminator
enum Color {
Red = 0xff0000,
Green = 0x00ff00,
Blue = 0x0000ff,
}
fn main() | {
// `enums` can be cast as integers.
println!("zero is {}", Number::Zero as i32);
println!("one is {}", Number::One as i32);
println!("roses are #{:06x}", Color::Red as i32);
println!("violets are #{:06x}", Color::Blue as i32);
} | identifier_body |
|
md4.rs | use std::cmp;
use rust_crypto::digest::Digest;
use util::{write_u64_le, write_u32v_le, read_u32v_le};
const DEFAULT_STATE: Md4State = Md4State {
state: [0x67452301_u32, 0xefcdab89, 0x98badcfe, 0x10325476]
};
#[derive(Copy, Clone)]
pub struct Md4 {
len: u64,
blocks: Blocks,
state: Md4State,
}
#[derive(Copy, Clone)]
pub struct Md4State {
pub state: [u32; 4],
}
#[derive(Copy)]
struct Blocks {
block: [u8; 64],
len: u32,
}
impl Md4 {
pub fn new() -> Md4 {
Md4 {
len: 0,
state: DEFAULT_STATE,
blocks: Blocks {
len: 0,
block: [0; 64],
},
}
}
pub fn from_state(len: u64, state: Md4State) -> Md4 |
}
impl Digest for Md4 {
fn input(&mut self, data: &[u8]) {
let len = &mut self.len;
let state = &mut self.state;
self.blocks.input(data, |chunk| {
*len += 64;
state.process(chunk);
});
}
fn result(&mut self, out: &mut [u8]) {
let mut state = self.state;
let bits = (self.len + (self.blocks.len as u64)) << 3;
let mut ml_bytes = [0_u8; 8];
write_u64_le(&mut ml_bytes, bits);
let blocklen = self.blocks.len as usize;
if blocklen < 56 {
self.len += 64;
let mut last = [0_u8; 64];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[56..64].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
} else {
self.len += 128;
let mut last = [0_u8; 128];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[120..128].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
state.process(&last[64..128]);
}
write_u32v_le(out, &state.state);
}
fn reset(&mut self) {
self.state = DEFAULT_STATE;
self.len = 0;
self.blocks.len = 0;
}
fn output_bits(&self) -> usize { 128 }
fn output_bytes(&self) -> usize { 16 }
fn block_size(&self) -> usize { 64 }
}
impl Blocks {
fn input<F>(&mut self, mut input: &[u8], mut f: F) where F: FnMut(&[u8]) {
if self.len > 0 {
let len = self.len as usize;
let amt = cmp::min(input.len(), self.block.len() - len);
self.block[len..len + amt].copy_from_slice(&input[..amt]);
if len + amt == self.block.len() {
f(&self.block);
self.len = 0;
input = &input[amt..];
} else {
self.len += amt as u32;
return
}
}
assert_eq!(self.len, 0);
for chunk in input.chunks(64) {
if chunk.len() == 64 {
f(chunk);
} else {
self.block[..chunk.len()].copy_from_slice(chunk);
self.len = chunk.len() as u32;
}
}
}
}
impl Md4State {
fn process(&mut self, block: &[u8]) {
fn f(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (!x & z)
}
fn g(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (x & z) | (y & z)
}
fn h(x: u32, y: u32, z: u32) -> u32 {
x ^ y ^ z
}
fn op1(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(f(b, c, d))
.wrapping_add(k)
.rotate_left(s)
}
fn op2(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(g(b, c, d))
.wrapping_add(k)
.wrapping_add(0x5a827999_u32)
.rotate_left(s)
}
fn op3(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(h(b, c, d))
.wrapping_add(k)
.wrapping_add(0x6ed9eba1_u32)
.rotate_left(s)
}
let mut w = [0u32; 16];
read_u32v_le(&mut w, block);
let mut a = self.state[0];
let mut b = self.state[1];
let mut c = self.state[2];
let mut d = self.state[3];
for i in 0..4 {
let j = i * 4;
a = op1(a, b, c, d, w[j ], 3);
d = op1(d, a, b, c, w[j + 1], 7);
c = op1(c, d, a, b, w[j + 2], 11);
b = op1(b, c, d, a, w[j + 3], 19);
}
for i in 0..4 {
a = op2(a, b, c, d, w[i ], 3);
d = op2(d, a, b, c, w[i + 4], 5);
c = op2(c, d, a, b, w[i + 8], 9);
b = op2(b, c, d, a, w[i + 12], 13);
}
for &i in [0, 2, 1, 3].iter() {
a = op3(a, b, c, d, w[i ], 3);
d = op3(d, a, b, c, w[i + 8], 9);
c = op3(c, d, a, b, w[i + 4], 11);
b = op3(b, c, d, a, w[i + 12], 15);
}
self.state[0] = self.state[0].wrapping_add(a);
self.state[1] = self.state[1].wrapping_add(b);
self.state[2] = self.state[2].wrapping_add(c);
self.state[3] = self.state[3].wrapping_add(d);
}
}
impl Clone for Blocks {
fn clone(&self) -> Blocks {
Blocks {..*self }
}
}
#[cfg(test)]
mod tests {
use rust_crypto::digest::Digest;
use super::Md4;
#[test]
fn test_correctness() {
let tests = [
("",
"31d6cfe0d16ae931b73c59d7e0c089c0"),
("a",
"bde52cb31de33e46245e05fbdbd6fb24"),
("abc",
"a448017aaf21d8525fc10ae87aa6729d"),
("abcdefghijklmnopqrstuvwxyz",
"d79e1c308aa5bbcdeea8ed63df412da9"),
("message digest",
"d9130a8164549fe818874806e1c7014b"),
];
let mut m = Md4::new();
for &(input, expected) in tests.iter() {
m.input(input.as_bytes());
assert_eq!(expected, m.result_str());
m.reset();
}
}
}
| {
Md4 {
len: len,
state: state,
blocks: Blocks {
len: 0,
block: [0; 64],
}
}
} | identifier_body |
md4.rs | use std::cmp;
use rust_crypto::digest::Digest;
use util::{write_u64_le, write_u32v_le, read_u32v_le};
const DEFAULT_STATE: Md4State = Md4State {
state: [0x67452301_u32, 0xefcdab89, 0x98badcfe, 0x10325476]
};
#[derive(Copy, Clone)]
pub struct Md4 {
len: u64,
blocks: Blocks,
state: Md4State,
}
#[derive(Copy, Clone)]
pub struct Md4State {
pub state: [u32; 4],
}
#[derive(Copy)]
struct Blocks {
block: [u8; 64],
len: u32,
}
impl Md4 {
pub fn new() -> Md4 {
Md4 {
len: 0,
state: DEFAULT_STATE,
blocks: Blocks {
len: 0,
block: [0; 64],
},
}
}
pub fn from_state(len: u64, state: Md4State) -> Md4 {
Md4 {
len: len,
state: state,
blocks: Blocks {
len: 0,
block: [0; 64],
}
}
}
}
impl Digest for Md4 {
fn input(&mut self, data: &[u8]) {
let len = &mut self.len;
let state = &mut self.state;
self.blocks.input(data, |chunk| {
*len += 64;
state.process(chunk);
});
}
fn result(&mut self, out: &mut [u8]) {
let mut state = self.state;
let bits = (self.len + (self.blocks.len as u64)) << 3;
let mut ml_bytes = [0_u8; 8];
write_u64_le(&mut ml_bytes, bits);
let blocklen = self.blocks.len as usize;
if blocklen < 56 {
self.len += 64;
let mut last = [0_u8; 64];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[56..64].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
} else {
self.len += 128;
let mut last = [0_u8; 128];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[120..128].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
state.process(&last[64..128]);
}
write_u32v_le(out, &state.state);
}
fn reset(&mut self) {
self.state = DEFAULT_STATE;
self.len = 0;
self.blocks.len = 0;
}
fn output_bits(&self) -> usize { 128 }
fn output_bytes(&self) -> usize { 16 }
fn block_size(&self) -> usize { 64 }
}
impl Blocks {
fn input<F>(&mut self, mut input: &[u8], mut f: F) where F: FnMut(&[u8]) {
if self.len > 0 {
let len = self.len as usize;
let amt = cmp::min(input.len(), self.block.len() - len);
self.block[len..len + amt].copy_from_slice(&input[..amt]);
if len + amt == self.block.len() {
f(&self.block);
self.len = 0;
input = &input[amt..];
} else {
self.len += amt as u32;
return
}
}
assert_eq!(self.len, 0);
for chunk in input.chunks(64) {
if chunk.len() == 64 {
f(chunk);
} else {
self.block[..chunk.len()].copy_from_slice(chunk);
self.len = chunk.len() as u32;
}
}
}
}
impl Md4State {
fn process(&mut self, block: &[u8]) {
fn f(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (!x & z)
}
fn g(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (x & z) | (y & z)
}
fn h(x: u32, y: u32, z: u32) -> u32 {
x ^ y ^ z
}
fn op1(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(f(b, c, d))
.wrapping_add(k)
.rotate_left(s)
}
fn | (a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(g(b, c, d))
.wrapping_add(k)
.wrapping_add(0x5a827999_u32)
.rotate_left(s)
}
fn op3(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(h(b, c, d))
.wrapping_add(k)
.wrapping_add(0x6ed9eba1_u32)
.rotate_left(s)
}
let mut w = [0u32; 16];
read_u32v_le(&mut w, block);
let mut a = self.state[0];
let mut b = self.state[1];
let mut c = self.state[2];
let mut d = self.state[3];
for i in 0..4 {
let j = i * 4;
a = op1(a, b, c, d, w[j ], 3);
d = op1(d, a, b, c, w[j + 1], 7);
c = op1(c, d, a, b, w[j + 2], 11);
b = op1(b, c, d, a, w[j + 3], 19);
}
for i in 0..4 {
a = op2(a, b, c, d, w[i ], 3);
d = op2(d, a, b, c, w[i + 4], 5);
c = op2(c, d, a, b, w[i + 8], 9);
b = op2(b, c, d, a, w[i + 12], 13);
}
for &i in [0, 2, 1, 3].iter() {
a = op3(a, b, c, d, w[i ], 3);
d = op3(d, a, b, c, w[i + 8], 9);
c = op3(c, d, a, b, w[i + 4], 11);
b = op3(b, c, d, a, w[i + 12], 15);
}
self.state[0] = self.state[0].wrapping_add(a);
self.state[1] = self.state[1].wrapping_add(b);
self.state[2] = self.state[2].wrapping_add(c);
self.state[3] = self.state[3].wrapping_add(d);
}
}
impl Clone for Blocks {
fn clone(&self) -> Blocks {
Blocks {..*self }
}
}
#[cfg(test)]
mod tests {
use rust_crypto::digest::Digest;
use super::Md4;
#[test]
fn test_correctness() {
let tests = [
("",
"31d6cfe0d16ae931b73c59d7e0c089c0"),
("a",
"bde52cb31de33e46245e05fbdbd6fb24"),
("abc",
"a448017aaf21d8525fc10ae87aa6729d"),
("abcdefghijklmnopqrstuvwxyz",
"d79e1c308aa5bbcdeea8ed63df412da9"),
("message digest",
"d9130a8164549fe818874806e1c7014b"),
];
let mut m = Md4::new();
for &(input, expected) in tests.iter() {
m.input(input.as_bytes());
assert_eq!(expected, m.result_str());
m.reset();
}
}
}
| op2 | identifier_name |
md4.rs | use std::cmp;
use rust_crypto::digest::Digest;
use util::{write_u64_le, write_u32v_le, read_u32v_le};
const DEFAULT_STATE: Md4State = Md4State {
state: [0x67452301_u32, 0xefcdab89, 0x98badcfe, 0x10325476]
};
#[derive(Copy, Clone)]
pub struct Md4 {
len: u64,
blocks: Blocks,
state: Md4State,
}
#[derive(Copy, Clone)]
pub struct Md4State {
pub state: [u32; 4],
}
#[derive(Copy)]
struct Blocks {
block: [u8; 64],
len: u32,
}
impl Md4 {
pub fn new() -> Md4 {
Md4 {
len: 0,
state: DEFAULT_STATE,
blocks: Blocks {
len: 0,
block: [0; 64],
},
}
}
pub fn from_state(len: u64, state: Md4State) -> Md4 {
Md4 {
len: len,
state: state,
blocks: Blocks {
len: 0,
block: [0; 64],
}
}
}
}
impl Digest for Md4 {
fn input(&mut self, data: &[u8]) {
let len = &mut self.len;
let state = &mut self.state;
self.blocks.input(data, |chunk| {
*len += 64;
state.process(chunk);
});
}
fn result(&mut self, out: &mut [u8]) {
let mut state = self.state;
let bits = (self.len + (self.blocks.len as u64)) << 3;
let mut ml_bytes = [0_u8; 8];
write_u64_le(&mut ml_bytes, bits);
let blocklen = self.blocks.len as usize;
if blocklen < 56 {
self.len += 64;
let mut last = [0_u8; 64];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[56..64].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
} else {
self.len += 128;
let mut last = [0_u8; 128];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[120..128].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
state.process(&last[64..128]);
}
write_u32v_le(out, &state.state);
}
fn reset(&mut self) {
self.state = DEFAULT_STATE;
self.len = 0;
self.blocks.len = 0;
}
fn output_bits(&self) -> usize { 128 }
fn output_bytes(&self) -> usize { 16 }
fn block_size(&self) -> usize { 64 }
}
impl Blocks {
fn input<F>(&mut self, mut input: &[u8], mut f: F) where F: FnMut(&[u8]) {
if self.len > 0 {
let len = self.len as usize;
let amt = cmp::min(input.len(), self.block.len() - len);
self.block[len..len + amt].copy_from_slice(&input[..amt]);
if len + amt == self.block.len() {
f(&self.block);
self.len = 0;
input = &input[amt..];
} else {
self.len += amt as u32;
return
}
}
assert_eq!(self.len, 0);
for chunk in input.chunks(64) {
if chunk.len() == 64 | else {
self.block[..chunk.len()].copy_from_slice(chunk);
self.len = chunk.len() as u32;
}
}
}
}
impl Md4State {
fn process(&mut self, block: &[u8]) {
fn f(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (!x & z)
}
fn g(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (x & z) | (y & z)
}
fn h(x: u32, y: u32, z: u32) -> u32 {
x ^ y ^ z
}
fn op1(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(f(b, c, d))
.wrapping_add(k)
.rotate_left(s)
}
fn op2(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(g(b, c, d))
.wrapping_add(k)
.wrapping_add(0x5a827999_u32)
.rotate_left(s)
}
fn op3(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(h(b, c, d))
.wrapping_add(k)
.wrapping_add(0x6ed9eba1_u32)
.rotate_left(s)
}
let mut w = [0u32; 16];
read_u32v_le(&mut w, block);
let mut a = self.state[0];
let mut b = self.state[1];
let mut c = self.state[2];
let mut d = self.state[3];
for i in 0..4 {
let j = i * 4;
a = op1(a, b, c, d, w[j ], 3);
d = op1(d, a, b, c, w[j + 1], 7);
c = op1(c, d, a, b, w[j + 2], 11);
b = op1(b, c, d, a, w[j + 3], 19);
}
for i in 0..4 {
a = op2(a, b, c, d, w[i ], 3);
d = op2(d, a, b, c, w[i + 4], 5);
c = op2(c, d, a, b, w[i + 8], 9);
b = op2(b, c, d, a, w[i + 12], 13);
}
for &i in [0, 2, 1, 3].iter() {
a = op3(a, b, c, d, w[i ], 3);
d = op3(d, a, b, c, w[i + 8], 9);
c = op3(c, d, a, b, w[i + 4], 11);
b = op3(b, c, d, a, w[i + 12], 15);
}
self.state[0] = self.state[0].wrapping_add(a);
self.state[1] = self.state[1].wrapping_add(b);
self.state[2] = self.state[2].wrapping_add(c);
self.state[3] = self.state[3].wrapping_add(d);
}
}
impl Clone for Blocks {
fn clone(&self) -> Blocks {
Blocks {..*self }
}
}
#[cfg(test)]
mod tests {
use rust_crypto::digest::Digest;
use super::Md4;
#[test]
fn test_correctness() {
let tests = [
("",
"31d6cfe0d16ae931b73c59d7e0c089c0"),
("a",
"bde52cb31de33e46245e05fbdbd6fb24"),
("abc",
"a448017aaf21d8525fc10ae87aa6729d"),
("abcdefghijklmnopqrstuvwxyz",
"d79e1c308aa5bbcdeea8ed63df412da9"),
("message digest",
"d9130a8164549fe818874806e1c7014b"),
];
let mut m = Md4::new();
for &(input, expected) in tests.iter() {
m.input(input.as_bytes());
assert_eq!(expected, m.result_str());
m.reset();
}
}
}
| {
f(chunk);
} | conditional_block |
md4.rs | use std::cmp;
use rust_crypto::digest::Digest;
use util::{write_u64_le, write_u32v_le, read_u32v_le};
const DEFAULT_STATE: Md4State = Md4State {
state: [0x67452301_u32, 0xefcdab89, 0x98badcfe, 0x10325476]
};
#[derive(Copy, Clone)]
pub struct Md4 {
len: u64,
blocks: Blocks,
state: Md4State,
}
#[derive(Copy, Clone)]
pub struct Md4State {
pub state: [u32; 4],
}
#[derive(Copy)]
struct Blocks {
block: [u8; 64],
len: u32,
}
impl Md4 {
pub fn new() -> Md4 {
Md4 {
len: 0,
state: DEFAULT_STATE,
blocks: Blocks {
len: 0,
block: [0; 64],
},
}
}
pub fn from_state(len: u64, state: Md4State) -> Md4 {
Md4 {
len: len,
state: state,
blocks: Blocks {
len: 0,
block: [0; 64],
}
}
}
}
impl Digest for Md4 {
fn input(&mut self, data: &[u8]) {
let len = &mut self.len;
let state = &mut self.state;
self.blocks.input(data, |chunk| {
*len += 64;
state.process(chunk);
});
}
fn result(&mut self, out: &mut [u8]) {
let mut state = self.state;
let bits = (self.len + (self.blocks.len as u64)) << 3;
let mut ml_bytes = [0_u8; 8];
write_u64_le(&mut ml_bytes, bits);
let blocklen = self.blocks.len as usize;
if blocklen < 56 {
self.len += 64;
let mut last = [0_u8; 64];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[56..64].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
} else {
self.len += 128;
let mut last = [0_u8; 128];
last[0..blocklen].copy_from_slice(&self.blocks.block[0..blocklen]);
last[blocklen] = 0x80;
last[120..128].copy_from_slice(&ml_bytes);
state.process(&last[0..64]);
state.process(&last[64..128]);
}
write_u32v_le(out, &state.state);
}
fn reset(&mut self) {
self.state = DEFAULT_STATE;
self.len = 0;
self.blocks.len = 0;
}
fn output_bits(&self) -> usize { 128 }
fn output_bytes(&self) -> usize { 16 }
fn block_size(&self) -> usize { 64 }
}
impl Blocks {
fn input<F>(&mut self, mut input: &[u8], mut f: F) where F: FnMut(&[u8]) {
if self.len > 0 {
let len = self.len as usize;
let amt = cmp::min(input.len(), self.block.len() - len);
self.block[len..len + amt].copy_from_slice(&input[..amt]);
if len + amt == self.block.len() {
f(&self.block);
self.len = 0;
input = &input[amt..];
} else {
self.len += amt as u32;
return
}
}
assert_eq!(self.len, 0);
for chunk in input.chunks(64) {
if chunk.len() == 64 {
f(chunk);
} else {
self.block[..chunk.len()].copy_from_slice(chunk);
self.len = chunk.len() as u32;
}
}
}
}
impl Md4State {
fn process(&mut self, block: &[u8]) {
fn f(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (!x & z)
}
fn g(x: u32, y: u32, z: u32) -> u32 {
(x & y) | (x & z) | (y & z)
}
fn h(x: u32, y: u32, z: u32) -> u32 {
x ^ y ^ z
}
fn op1(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(f(b, c, d))
.wrapping_add(k)
.rotate_left(s)
}
fn op2(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(g(b, c, d))
.wrapping_add(k)
.wrapping_add(0x5a827999_u32)
.rotate_left(s)
}
fn op3(a: u32, b: u32, c: u32, d: u32, k: u32, s: u32) -> u32 {
a.wrapping_add(h(b, c, d))
.wrapping_add(k)
.wrapping_add(0x6ed9eba1_u32) | let mut w = [0u32; 16];
read_u32v_le(&mut w, block);
let mut a = self.state[0];
let mut b = self.state[1];
let mut c = self.state[2];
let mut d = self.state[3];
for i in 0..4 {
let j = i * 4;
a = op1(a, b, c, d, w[j ], 3);
d = op1(d, a, b, c, w[j + 1], 7);
c = op1(c, d, a, b, w[j + 2], 11);
b = op1(b, c, d, a, w[j + 3], 19);
}
for i in 0..4 {
a = op2(a, b, c, d, w[i ], 3);
d = op2(d, a, b, c, w[i + 4], 5);
c = op2(c, d, a, b, w[i + 8], 9);
b = op2(b, c, d, a, w[i + 12], 13);
}
for &i in [0, 2, 1, 3].iter() {
a = op3(a, b, c, d, w[i ], 3);
d = op3(d, a, b, c, w[i + 8], 9);
c = op3(c, d, a, b, w[i + 4], 11);
b = op3(b, c, d, a, w[i + 12], 15);
}
self.state[0] = self.state[0].wrapping_add(a);
self.state[1] = self.state[1].wrapping_add(b);
self.state[2] = self.state[2].wrapping_add(c);
self.state[3] = self.state[3].wrapping_add(d);
}
}
impl Clone for Blocks {
fn clone(&self) -> Blocks {
Blocks {..*self }
}
}
#[cfg(test)]
mod tests {
use rust_crypto::digest::Digest;
use super::Md4;
#[test]
fn test_correctness() {
let tests = [
("",
"31d6cfe0d16ae931b73c59d7e0c089c0"),
("a",
"bde52cb31de33e46245e05fbdbd6fb24"),
("abc",
"a448017aaf21d8525fc10ae87aa6729d"),
("abcdefghijklmnopqrstuvwxyz",
"d79e1c308aa5bbcdeea8ed63df412da9"),
("message digest",
"d9130a8164549fe818874806e1c7014b"),
];
let mut m = Md4::new();
for &(input, expected) in tests.iter() {
m.input(input.as_bytes());
assert_eq!(expected, m.result_str());
m.reset();
}
}
} | .rotate_left(s)
}
| random_line_split |
brightnessfilter.rs | rs_allocation gIn;
rs_allocation gOut;
rs_script gScript;
static int mImageWidth;
const uchar4 *gPixels;
void root(const uchar4 *v_in, uchar4 *v_out, const void *usrData, uint32_t x, uint32_t y) {
float4 apixel = rsUnpackColor8888(*v_in);
float3 pixel = apixel.rgb;
float factor = brightnessValue;
pixel = pixel + factor;
pixel = clamp(pixel,0.0f,1.0f);
*v_out = rsPackColorTo8888(pixel.rgb);
}
void filter() {
mImageWidth = rsAllocationGetDimX(gIn);
rsDebug("Image size is ", rsAllocationGetDimX(gIn), rsAllocationGetDimY(gOut));
rsForEach(gScript, gIn, gOut, 0, 0);
} | #pragma version(1)
#pragma rs java_package_name(com.dss.renderscripttest)
float brightnessValue;
| random_line_split |
|
alloc.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! Memory allocation
//!
//! = Description
//!
//! :allocator: link:lrs::alloc::Allocator[Allocator]
//!
//! This module contains memory allocators and the {allocator} trait implemented by them.
//!
//! = Remarks
//!
//! :max: link:lrs::alloc::MAX_SIZE[MAX_SIZE]
//!
//! The maximum size of an allocation is limited to the maximum value that can be
//! represented in an `isize`. This limit is checked by the allocators and allocation will
//! fail if a too-large allocation is requested. The limit is also available through the
//! {max} constant.
//!
//! == `Heap` and `FbHeap`
//!
//! :heap: link:lrs::alloc::Heap[Heap]
//! :fbheap: link:lrs::alloc::FbHeap[FbHeap]
//!
//! This module contains two type definitions that affect the default behavior of lrs. The
//! {heap} allocator is the default allocator used by types that have an allocator
//! argument. For example, `Vec<T>` is the same as `Vec<T, Heap>`. The {fbheap} allocator
//! is the allocator used by functions that don't allocate in the common case and fall
//! back to allocating if they have to. For example, `File::open` will try to construct a
//! null-terminated path argument on the stack but falls back to allocating with the
//! {fbheap} allocator if the path is too long.
//!
//! == Jemalloc
//!
//! The Jemalloc allocator is only available if lrs was compiled with the `jemalloc`
//! option.
//!
//! = Examples
//!
//! The following example performs a simple allocate-store-read-free operation.
//!
//! ----
//! unsafe {
//! let mem: *mut u8 = try!(Bda::allocate());
//! *mem = 1;
//! println!("{}", *mem);
//! Bda::free(mem);
//! }
//! ----
pub use lrs_alloc::{
MAX_SIZE, empty_ptr, MemPool, Heap, FbHeap, Dummy, TaPool, AlignAlloc, | alloc_array, realloc_array, free_array, alloc, free, OncePool,
};
#[cfg(not(freestanding))] pub use lrs_alloc::{Bda, TlAlc};
#[cfg(not(no_libc))] pub use lrs_alloc::{Libc};
#[cfg(jemalloc)] pub use lrs_alloc::{JeMalloc}; | random_line_split |
|
records.rs | //! Looking up raw records.
use futures::{Async, Future, Poll};
use ::bits::{DNameSlice, MessageBuf};
use ::iana::{Rtype, Class};
use super::super::{Query, Resolver};
use super::super::error::Error;
use super::search::SearchIter;
//------------ lookup_records ------------------------------------------------
/// Creates a future that looks up DNS records.
///
/// The future will use the given resolver to perform a DNS query for the
/// records of type `rtype` associated with `name` in `class`.
/// This differs from calling `resolv.query()` directly in that it can treat
/// relative names. In this case, the resolver configuration is considered
/// to translate the name into a series of absolute names. If you want to
/// find out the name that resulted in a successful answer, you can look at
/// the query in the resulting message.
pub fn lookup_records<N>(resolver: Resolver, name: N, rtype: Rtype,
class: Class) -> LookupRecords
where N: AsRef<DNameSlice> {
let name = name.as_ref();
let mut search = SearchIter::new(resolver.clone(), name);
let search_name = search.as_mut().map(|s| s.next().unwrap());
let query_name = match search_name {
Some(ref name) => name,
None => name
};
let query = resolver.clone().query((query_name, rtype, class));
LookupRecords {
resolver: resolver,
query: query,
search: search,
rtype: rtype,
class: class
}
}
//------------ LookupRecords -------------------------------------------------
/// The future returned by [`lookup_records()`].
///
/// [`lookup_records()`]: fn.lookup_records.html
pub struct LookupRecords {
/// The resolver to run queries on.
resolver: Resolver,
/// The current querry.
query: Query,
/// An optional search list iterator for searching a name.
search: Option<SearchIter>,
/// The resource record type to search for.
rtype: Rtype,
/// The class to search for.
class: Class,
}
//--- Future
impl Future for LookupRecords {
type Item = MessageBuf;
type Error = Error;
fn | (&mut self) -> Poll<Self::Item, Self::Error> {
let err = match self.query.poll() {
Ok(Async::NotReady) => return Ok(Async::NotReady),
Ok(Async::Ready(item)) => return Ok(Async::Ready(item)),
Err(err) => err
};
let name = match self.search {
None => return Err(err),
Some(ref mut search) => {
match search.next() {
None => return Err(err),
Some(name) => name,
}
}
};
self.query = self.resolver.clone()
.query((name, self.rtype, self.class));
self.poll()
}
}
| poll | identifier_name |
records.rs | //! Looking up raw records.
use futures::{Async, Future, Poll};
use ::bits::{DNameSlice, MessageBuf};
use ::iana::{Rtype, Class};
use super::super::{Query, Resolver};
use super::super::error::Error;
use super::search::SearchIter;
//------------ lookup_records ------------------------------------------------
/// Creates a future that looks up DNS records.
///
/// The future will use the given resolver to perform a DNS query for the
/// records of type `rtype` associated with `name` in `class`.
/// This differs from calling `resolv.query()` directly in that it can treat
/// relative names. In this case, the resolver configuration is considered
/// to translate the name into a series of absolute names. If you want to
/// find out the name that resulted in a successful answer, you can look at
/// the query in the resulting message.
pub fn lookup_records<N>(resolver: Resolver, name: N, rtype: Rtype,
class: Class) -> LookupRecords
where N: AsRef<DNameSlice> {
let name = name.as_ref();
let mut search = SearchIter::new(resolver.clone(), name);
let search_name = search.as_mut().map(|s| s.next().unwrap());
let query_name = match search_name {
Some(ref name) => name,
None => name
};
let query = resolver.clone().query((query_name, rtype, class));
LookupRecords {
resolver: resolver,
query: query,
search: search,
rtype: rtype,
class: class
}
}
//------------ LookupRecords -------------------------------------------------
/// The future returned by [`lookup_records()`].
///
/// [`lookup_records()`]: fn.lookup_records.html
pub struct LookupRecords {
/// The resolver to run queries on.
resolver: Resolver,
/// The current querry.
query: Query,
/// An optional search list iterator for searching a name.
search: Option<SearchIter>,
/// The resource record type to search for.
rtype: Rtype,
/// The class to search for.
class: Class,
}
//--- Future
impl Future for LookupRecords {
type Item = MessageBuf;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
let err = match self.query.poll() {
Ok(Async::NotReady) => return Ok(Async::NotReady),
Ok(Async::Ready(item)) => return Ok(Async::Ready(item)),
Err(err) => err
};
let name = match self.search {
None => return Err(err), | None => return Err(err),
Some(name) => name,
}
}
};
self.query = self.resolver.clone()
.query((name, self.rtype, self.class));
self.poll()
}
} | Some(ref mut search) => {
match search.next() { | random_line_split |
globalsystems.rs | extern mod glfw;
extern mod std;
use components::{Components,texture_from_uint};
//GLOBAL SYSTEM DEFINITIONS
pub trait GlobalSystem {
fn process(&mut self, window: &glfw::Window) -> ();
}
pub struct ScoreUpdateSystem {
paddle: @Components,
counter: @Components,
score: uint,
port: Port<uint>
}
impl GlobalSystem for ScoreUpdateSystem {
fn process(&mut self, _: &glfw::Window) -> () {
loop {
match self.port.try_recv() {
Some(i) => {
self.score += i;
}
None => break
}
}
self.counter.sprite.unwrap().texture = Some(texture_from_uint(self.score));
}
}
pub struct BotInputSystem {
paddle: @Components,
ball: @Components
}
impl GlobalSystem for BotInputSystem {
fn process(&mut self, _: &glfw::Window) -> () |
}
pub struct KeyboardInputSystem {
paddle: @Components
}
impl GlobalSystem for KeyboardInputSystem {
fn process(&mut self, window: &glfw::Window) -> () {
let mut dir = 0.0;
if window.get_key(glfw::KeyA) == glfw::Press {
dir += 1.0;
}
if window.get_key(glfw::KeyZ) == glfw::Press {
dir -= 1.0;
}
self.paddle.vert_velocity.unwrap().y = 1.5*dir/60.0;
}
}
| {
let d = self.ball.position.unwrap().y - self.paddle.position.unwrap().y;
if std::num::abs(d) > 0.2 {
if d > 0.0 {
self.paddle.vert_velocity.unwrap().y = 1.5/60.0;
} else {
self.paddle.vert_velocity.unwrap().y = -1.5/60.0;
}
} else {
self.paddle.vert_velocity.unwrap().y = 0.0;
}
} | identifier_body |
globalsystems.rs | extern mod glfw;
extern mod std;
use components::{Components,texture_from_uint};
//GLOBAL SYSTEM DEFINITIONS
pub trait GlobalSystem {
fn process(&mut self, window: &glfw::Window) -> ();
}
pub struct ScoreUpdateSystem {
paddle: @Components,
counter: @Components,
score: uint,
port: Port<uint>
}
impl GlobalSystem for ScoreUpdateSystem {
fn | (&mut self, _: &glfw::Window) -> () {
loop {
match self.port.try_recv() {
Some(i) => {
self.score += i;
}
None => break
}
}
self.counter.sprite.unwrap().texture = Some(texture_from_uint(self.score));
}
}
pub struct BotInputSystem {
paddle: @Components,
ball: @Components
}
impl GlobalSystem for BotInputSystem {
fn process(&mut self, _: &glfw::Window) -> () {
let d = self.ball.position.unwrap().y - self.paddle.position.unwrap().y;
if std::num::abs(d) > 0.2 {
if d > 0.0 {
self.paddle.vert_velocity.unwrap().y = 1.5/60.0;
} else {
self.paddle.vert_velocity.unwrap().y = -1.5/60.0;
}
} else {
self.paddle.vert_velocity.unwrap().y = 0.0;
}
}
}
pub struct KeyboardInputSystem {
paddle: @Components
}
impl GlobalSystem for KeyboardInputSystem {
fn process(&mut self, window: &glfw::Window) -> () {
let mut dir = 0.0;
if window.get_key(glfw::KeyA) == glfw::Press {
dir += 1.0;
}
if window.get_key(glfw::KeyZ) == glfw::Press {
dir -= 1.0;
}
self.paddle.vert_velocity.unwrap().y = 1.5*dir/60.0;
}
}
| process | identifier_name |
globalsystems.rs | extern mod glfw;
extern mod std;
use components::{Components,texture_from_uint};
//GLOBAL SYSTEM DEFINITIONS
pub trait GlobalSystem {
fn process(&mut self, window: &glfw::Window) -> ();
}
pub struct ScoreUpdateSystem {
paddle: @Components,
counter: @Components,
score: uint,
port: Port<uint>
}
impl GlobalSystem for ScoreUpdateSystem {
fn process(&mut self, _: &glfw::Window) -> () {
loop {
match self.port.try_recv() {
Some(i) => {
self.score += i;
}
None => break
}
}
self.counter.sprite.unwrap().texture = Some(texture_from_uint(self.score));
}
}
pub struct BotInputSystem {
paddle: @Components,
ball: @Components
}
impl GlobalSystem for BotInputSystem {
fn process(&mut self, _: &glfw::Window) -> () {
let d = self.ball.position.unwrap().y - self.paddle.position.unwrap().y;
if std::num::abs(d) > 0.2 {
if d > 0.0 {
self.paddle.vert_velocity.unwrap().y = 1.5/60.0;
} else {
self.paddle.vert_velocity.unwrap().y = -1.5/60.0;
} | }
}
}
pub struct KeyboardInputSystem {
paddle: @Components
}
impl GlobalSystem for KeyboardInputSystem {
fn process(&mut self, window: &glfw::Window) -> () {
let mut dir = 0.0;
if window.get_key(glfw::KeyA) == glfw::Press {
dir += 1.0;
}
if window.get_key(glfw::KeyZ) == glfw::Press {
dir -= 1.0;
}
self.paddle.vert_velocity.unwrap().y = 1.5*dir/60.0;
}
} | } else {
self.paddle.vert_velocity.unwrap().y = 0.0; | random_line_split |
sync.rs | #[macro_use]
extern crate indep;
#[macro_use]
extern crate log;
//`sync` mod contains DI set for single-threaded environments (uses Rc<RefCell<>> as an abstraction).
//We pretend that all the DI module traits and implementations are separated into different mods.
//Base trait for all depencencies. May contain no functions, `init` here is just an example.
pub mod base {
pub trait Base {
fn init(&mut self);
}
}
//Sample trait #1
pub mod t1 {
pub trait Trait1 {
fn do1(&self);
}
}
//Sample trait #1
pub mod t2 {
pub trait Trait2 {
fn do2(&self) -> String;
}
}
//Sample trait #1
pub mod t3 {
pub trait Trait3 {
fn do3(&mut self);
}
}
//Sample implementation struct #1 - implements trait #1 and trait #2 (and, of course, trait Base).
pub mod i1 {
use super::{Dependency,Dependent,Implementation};
use super::base::Base;
use super::t1::Trait1;
use super::t2::Trait2;
use std::rc::Rc;
use std::cell::RefCell;
struct Impl1;
impl Impl1 {
pub fn foo(&self) {
println!("foo from Impl1");
}
pub fn new() -> Impl1 {
Impl1
}
}
impl Trait1 for Box<Impl1> {
fn do1(&self) {
self.foo();
}
}
impl Trait2 for Box<Impl1> {
fn | (&self) -> String {
format!("Impl1 says 'Trait2'")
}
}
impl Base for Box<Impl1> {
fn init(&mut self) {
self.foo();
}
}
//Here comes `indep`.
//This macro defines requirements of the DI module implementation.
//The syntax is {Impl_Name, [requirement_name_1: requirement_trait_1, requirement_name_2: requirement_trait_2],... }.
//Here `Impl1` does not have dependecies, so its requirement array is empty.
indep_reqs_sync!{Impl1, []}
//This macro defines the implementations of the DI module. The syntax is {Impl_Name, Base_Trait_Name, [trait_1, trait_2,... ]}
//`Impl1` implements `Trait1` and `Trait2`, so it is stated in a macro.
indep_impls_sync!{Impl1, Base, [Trait1,Trait2]}
//This macro generates default implementation of the `new()` function, that returns Box<Dependency>.
//The `Dependency` itself is an trait that DI pool accepts. Internally it is always implemented by a struct `RcRefCellBox`,
//which is a wrapper for Rc<RefCell<Box<Impl1>>>. This wrapper is required for Rust typecasting.
indep_default_new_sync!{Impl1}
}
//Sample implementation struct #2 - implements trait #2 (and trait Base). Depends on `Trait1` - see the mention of `indep_reqs_sync` below.
pub mod i2 {
use super::{Dependency,Dependent,Implementation};
use super::t1::Trait1;
use super::t2::Trait2;
use super::base::Base;
use std::rc::Rc;
use std::cell::RefCell;
struct Impl2 {
//It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
t1: Option<Rc<RefCell<Trait1>>>
}
impl Impl2 {
pub fn boo(&self) {
println!("boo from Impl2");
let b = self.t1.as_ref().unwrap();
b.borrow().do1();
}
pub fn new() -> Impl2 {
Impl2 {
t1:None
}
}
}
impl Trait2 for Box<Impl2> {
fn do2(&self) -> String {
self.boo();
format!("Impl2 says 'Trait2'")
}
}
impl Base for Box<Impl2> {
fn init(&mut self) {
self.boo();
}
}
//`Impl2` requires `Trait1` inside as member named `t1`. `t1` should have type Option<Rc<RefCell<Trait1>>>.
indep_reqs_sync!{Impl2, [Trait1: [t1]]}
//See corresponding statement for `Impl1` above. The implementation struct should (obviously) implement at least one DI trait.
indep_impls_sync!{Impl2, Base, [Trait2]}
//See corresponding statement for `Impl1` above.
indep_default_new_sync!{Impl2}
}
//Sample implementation struct #3 - implements trait #3 (and trait Base). Depends on `Trait1` as `t1_1`,
//`Trait2` as `t2_1` and `t2_2` (sepatare instances) - see the mention of `indep_reqs_async` below.
pub mod i3 {
use super::{Dependency,Dependent,Implementation};
use super::t3::Trait3;
use super::t2::Trait2;
use super::t1::Trait1;
use super::base::Base;
use std::rc::Rc;
use std::cell::RefCell;
//It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
struct Impl3 {
t2_1: Option<Rc<RefCell<Trait2>>>,
t2_2: Option<Rc<RefCell<Trait2>>>,
t1_1: Option<Rc<RefCell<Trait1>>>
}
impl Impl3 {
pub fn oo(&mut self) {
let b1 = self.t2_1.as_mut().unwrap();
let b2 = self.t2_2.as_mut().unwrap();
let b3 = self.t1_1.as_mut().unwrap();
println!("oo from Impl3: \n1: {}\n2: {}",
b1.borrow_mut().do2(),
b2.borrow_mut().do2()
);
b3.borrow().do1();
}
pub fn new() -> Impl3 {
Impl3 {
t1_1: None,
t2_1: None,
t2_2: None,
}
}
}
impl Trait3 for Box<Impl3> {
fn do3(&mut self) {
self.oo();
}
}
impl Base for Box<Impl3> {
fn init(&mut self) {
self.oo();
}
}
//`Impl3` depends on `Trait1` and two different instances of `Trait2`.
indep_reqs_sync!{Impl3, [Trait1: [t1_1], Trait2: [t2_1,t2_2]]}
//See corresponding statement for `Impl1` above. The implementation struct should (obviously) implement at least one DI trait.
indep_impls_sync!{Impl3, Base, [Trait3]}
//See corresponding statement for `Impl1` above.
indep_default_new_sync!{Impl3}
}
use self::t3::Trait3;
use self::t2::Trait2;
use self::t1::Trait1;
use self::base::Base;
//Initialize all the DI classes and traits The syntax is {Base_Trait, Trait1, Trait2,... },
//where Trait# is a trait which needs to be DI-enabled.
indep_pool_sync!{Base, Trait1,Trait2,Trait3}
fn main() {
//`Pool` is a structure created by `indep_pool_sync` macro.
let mut pool = Pool::new();
let t1 = i1::new_dep();
let t2 = i2::new_dep();
let t3 = i3::new_dep();
//Here we mark this struct with a special tag so it will be injected only to similarly named member of a dependent struct.
pool.add_tagged(t1, vec!["t1_1".to_string()]);
//Add DI dependency with no tag, which means it will be injected to any struct that has a dependency of the corresponding trait.
pool.add(t2);
//Same here.
pool.add(t3);
//`stat()` is a simple utility method of a `Pool` that shows its content name-wise.
println!("Pool stat: {}", pool.stat());
}
| do2 | identifier_name |
sync.rs | #[macro_use]
extern crate indep;
#[macro_use]
extern crate log;
//`sync` mod contains DI set for single-threaded environments (uses Rc<RefCell<>> as an abstraction).
//We pretend that all the DI module traits and implementations are separated into different mods.
//Base trait for all dependencies. May contain no functions; `init` here is just an example.
pub mod base {
    /// Lifecycle trait every injectable dependency must implement.
    pub trait Base {
        fn init(&mut self);
    }
}
//Sample trait #1
pub mod t1 {
    pub trait Trait1 {
        fn do1(&self);
    }
}
//Sample trait #2
pub mod t2 {
    pub trait Trait2 {
        fn do2(&self) -> String;
    }
}
//Sample trait #3
pub mod t3 {
    pub trait Trait3 {
        fn do3(&mut self);
    }
}
//Sample implementation struct #1 - implements trait #1 and trait #2 (and, of course, trait Base).
pub mod i1 {
use super::{Dependency,Dependent,Implementation};
use super::base::Base;
use super::t1::Trait1;
use super::t2::Trait2;
use std::rc::Rc;
use std::cell::RefCell;
struct Impl1;
impl Impl1 {
pub fn foo(&self) {
println!("foo from Impl1");
}
pub fn new() -> Impl1 {
Impl1
}
}
impl Trait1 for Box<Impl1> {
fn do1(&self) {
self.foo();
}
}
impl Trait2 for Box<Impl1> {
fn do2(&self) -> String {
format!("Impl1 says 'Trait2'")
}
}
impl Base for Box<Impl1> {
fn init(&mut self) {
self.foo(); |
//Here comes `indep`.
//This macro defines requirements of the DI module implementation.
//The syntax is {Impl_Name, [requirement_name_1: requirement_trait_1, requirement_name_2: requirement_trait_2],... }.
//Here `Impl1` does not have dependecies, so its requirement array is empty.
indep_reqs_sync!{Impl1, []}
//This macro defines the implementations of the DI module. The syntax is {Impl_Name, Base_Trait_Name, [trait_1, trait_2,... ]}
//`Impl1` implements `Trait1` and `Trait2`, so it is stated in a macro.
indep_impls_sync!{Impl1, Base, [Trait1,Trait2]}
//This macro generates default implementation of the `new()` function, that returns Box<Dependency>.
//The `Dependency` itself is an trait that DI pool accepts. Internally it is always implemented by a struct `RcRefCellBox`,
//which is a wrapper for Rc<RefCell<Box<Impl1>>>. This wrapper is required for Rust typecasting.
indep_default_new_sync!{Impl1}
}
//Sample implementation struct #2 - implements trait #2 (and trait Base). Depends on `Trait1` - see the mention of `indep_reqs_sync` below.
pub mod i2 {
    use super::{Dependency,Dependent,Implementation};
    use super::t1::Trait1;
    use super::t2::Trait2;
    use super::base::Base;
    use std::rc::Rc;
    use std::cell::RefCell;
    /// Sample implementation of `Trait2` that depends on one `Trait1` instance.
    struct Impl2 {
        //It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
        t1: Option<Rc<RefCell<Trait1>>>
    }
    impl Impl2 {
        /// Calls through to the injected `Trait1` dependency.
        pub fn boo(&self) {
            println!("boo from Impl2");
            // A missing dependency is a wiring bug — fail loudly with context
            // instead of a bare `unwrap()` panic.
            let t1 = self.t1.as_ref().expect("Impl2: `t1` dependency was not injected");
            t1.borrow().do1();
        }
        pub fn new() -> Impl2 {
            Impl2 {
                t1: None
            }
        }
    }
    impl Trait2 for Box<Impl2> {
        fn do2(&self) -> String {
            self.boo();
            // `to_string()` instead of a no-argument `format!` (clippy::useless_format).
            "Impl2 says 'Trait2'".to_string()
        }
    }
    impl Base for Box<Impl2> {
        fn init(&mut self) {
            self.boo();
        }
    }
    //`Impl2` requires `Trait1` injected into the member named `t1` (type Option<Rc<RefCell<Trait1>>>).
    indep_reqs_sync!{Impl2, [Trait1: [t1]]}
    //Implementations provided by this module: the struct implements `Trait2` (plus `Base`).
    indep_impls_sync!{Impl2, Base, [Trait2]}
    //Generates the default `new_dep()` constructor returning Box<Dependency>.
    indep_default_new_sync!{Impl2}
}
//Sample implementation struct #3 - implements trait #3 (and trait Base). Depends on `Trait1` as `t1_1`,
//`Trait2` as `t2_1` and `t2_2` (sepatare instances) - see the mention of `indep_reqs_async` below.
pub mod i3 {
    use super::{Dependency,Dependent,Implementation};
    use super::t3::Trait3;
    use super::t2::Trait2;
    use super::t1::Trait1;
    use super::base::Base;
    use std::rc::Rc;
    use std::cell::RefCell;
    //It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
    /// Sample implementation of `Trait3` depending on one `Trait1` and two
    /// separately named `Trait2` instances.
    struct Impl3 {
        t2_1: Option<Rc<RefCell<Trait2>>>,
        t2_2: Option<Rc<RefCell<Trait2>>>,
        t1_1: Option<Rc<RefCell<Trait1>>>
    }
    impl Impl3 {
        /// Exercises all three injected dependencies.
        pub fn oo(&mut self) {
            // `as_ref` + `borrow` suffice because `do1`/`do2` take `&self`.
            // The previous `borrow_mut` on both `t2_*` slots would panic if the
            // pool injected the *same* untagged `Trait2` instance into both:
            // the two exclusive RefCell borrows are alive simultaneously until
            // the end of the `println!` statement.
            let b1 = self.t2_1.as_ref().expect("Impl3: `t2_1` dependency was not injected");
            let b2 = self.t2_2.as_ref().expect("Impl3: `t2_2` dependency was not injected");
            let b3 = self.t1_1.as_ref().expect("Impl3: `t1_1` dependency was not injected");
            println!("oo from Impl3: \n1: {}\n2: {}",
                b1.borrow().do2(),
                b2.borrow().do2()
            );
            b3.borrow().do1();
        }
        pub fn new() -> Impl3 {
            Impl3 {
                t1_1: None,
                t2_1: None,
                t2_2: None,
            }
        }
    }
    impl Trait3 for Box<Impl3> {
        fn do3(&mut self) {
            self.oo();
        }
    }
    impl Base for Box<Impl3> {
        fn init(&mut self) {
            self.oo();
        }
    }
    //`Impl3` depends on `Trait1` (tagged `t1_1`) and two different instances of `Trait2`.
    indep_reqs_sync!{Impl3, [Trait1: [t1_1], Trait2: [t2_1,t2_2]]}
    //The struct implements `Trait3` (plus `Base`).
    indep_impls_sync!{Impl3, Base, [Trait3]}
    //Generates the default `new_dep()` constructor returning Box<Dependency>.
    indep_default_new_sync!{Impl3}
}
use self::t3::Trait3;
use self::t2::Trait2;
use self::t1::Trait1;
use self::base::Base;
//Initialize all the DI classes and traits The syntax is {Base_Trait, Trait1, Trait2,... },
//where Trait# is a trait which needs to be DI-enabled.
indep_pool_sync!{Base, Trait1,Trait2,Trait3}
fn main() {
//`Pool` is a structure created by `indep_pool_sync` macro.
let mut pool = Pool::new();
let t1 = i1::new_dep();
let t2 = i2::new_dep();
let t3 = i3::new_dep();
//Here we mark this struct with a special tag so it will be injected only to similarly named member of a dependent struct.
pool.add_tagged(t1, vec!["t1_1".to_string()]);
//Add DI dependency with no tag, which means it will be injected to any struct that has a dependency of the corresponding trait.
pool.add(t2);
//Same here.
pool.add(t3);
//`stat()` is a simple utility method of a `Pool` that shows its content name-wise.
println!("Pool stat: {}", pool.stat());
} | }
} | random_line_split |
sync.rs | #[macro_use]
extern crate indep;
#[macro_use]
extern crate log;
//`sync` mod contains DI set for single-threaded environments (uses Rc<RefCell<>> as an abstraction).
//We pretend that all the DI module traits and implementations are separated into different mods.
//Base trait for all dependencies. May contain no functions; `init` here is just an example.
pub mod base {
    /// Lifecycle trait every injectable dependency must implement.
    pub trait Base {
        fn init(&mut self);
    }
}
//Sample trait #1
pub mod t1 {
    pub trait Trait1 {
        fn do1(&self);
    }
}
//Sample trait #2
pub mod t2 {
    pub trait Trait2 {
        fn do2(&self) -> String;
    }
}
//Sample trait #3
pub mod t3 {
    pub trait Trait3 {
        fn do3(&mut self);
    }
}
//Sample implementation struct #1 - implements trait #1 and trait #2 (and, of course, trait Base).
pub mod i1 {
    use super::{Dependency,Dependent,Implementation};
    use super::base::Base;
    use super::t1::Trait1;
    use super::t2::Trait2;
    use std::rc::Rc;
    use std::cell::RefCell;
    /// Dependency-free sample implementation providing `Trait1` and `Trait2`.
    struct Impl1;
    impl Impl1 {
        /// Inherent helper reused by the trait impls below.
        pub fn foo(&self) {
            println!("foo from Impl1");
        }
        pub fn new() -> Impl1 {
            Impl1
        }
    }
    impl Trait1 for Box<Impl1> {
        fn do1(&self) {
            self.foo();
        }
    }
    impl Trait2 for Box<Impl1> {
        fn do2(&self) -> String {
            // `to_string()` instead of a no-argument `format!` (clippy::useless_format).
            "Impl1 says 'Trait2'".to_string()
        }
    }
    impl Base for Box<Impl1> {
        fn init(&mut self) {
            self.foo();
        }
    }
    //Here comes `indep`.
    //This macro declares the requirements of the DI module implementation.
    //The syntax is {Impl_Name, [requirement_name_1: requirement_trait_1, ...]}.
    //`Impl1` has no dependencies, so its requirement array is empty.
    indep_reqs_sync!{Impl1, []}
    //This macro declares the implementations of the DI module: {Impl_Name, Base_Trait_Name, [trait_1, trait_2, ...]}.
    //`Impl1` implements `Trait1` and `Trait2`.
    indep_impls_sync!{Impl1, Base, [Trait1,Trait2]}
    //Generates the default `new_dep()` returning Box<Dependency>; internally the
    //dependency is wrapped in an Rc<RefCell<Box<Impl1>>> (required for typecasting).
    indep_default_new_sync!{Impl1}
}
//Sample implementation struct #2 - implements trait #2 (and trait Base). Depends on `Trait1` - see the mention of `indep_reqs_sync` below.
pub mod i2 {
    use super::{Dependency,Dependent,Implementation};
    use super::t1::Trait1;
    use super::t2::Trait2;
    use super::base::Base;
    use std::rc::Rc;
    use std::cell::RefCell;
    /// Sample implementation of `Trait2` that depends on one `Trait1` instance.
    struct Impl2 {
        //It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
        t1: Option<Rc<RefCell<Trait1>>>
    }
    impl Impl2 {
        /// Calls through to the injected `Trait1` dependency.
        pub fn boo(&self) {
            println!("boo from Impl2");
            // A missing dependency is a wiring bug — fail loudly with context
            // instead of a bare `unwrap()` panic.
            let t1 = self.t1.as_ref().expect("Impl2: `t1` dependency was not injected");
            t1.borrow().do1();
        }
        pub fn new() -> Impl2 {
            Impl2 {
                t1: None
            }
        }
    }
    impl Trait2 for Box<Impl2> {
        fn do2(&self) -> String {
            self.boo();
            // `to_string()` instead of a no-argument `format!` (clippy::useless_format).
            "Impl2 says 'Trait2'".to_string()
        }
    }
    impl Base for Box<Impl2> {
        fn init(&mut self) {
            self.boo();
        }
    }
    //`Impl2` requires `Trait1` injected into the member named `t1` (type Option<Rc<RefCell<Trait1>>>).
    indep_reqs_sync!{Impl2, [Trait1: [t1]]}
    //Implementations provided by this module: the struct implements `Trait2` (plus `Base`).
    indep_impls_sync!{Impl2, Base, [Trait2]}
    //Generates the default `new_dep()` constructor returning Box<Dependency>.
    indep_default_new_sync!{Impl2}
}
//Sample implementation struct #3 - implements trait #3 (and trait Base). Depends on `Trait1` as `t1_1`,
//`Trait2` as `t2_1` and `t2_2` (sepatare instances) - see the mention of `indep_reqs_async` below.
pub mod i3 {
    use super::{Dependency,Dependent,Implementation};
    use super::t3::Trait3;
    use super::t2::Trait2;
    use super::t1::Trait1;
    use super::base::Base;
    use std::rc::Rc;
    use std::cell::RefCell;
    //It is an Indep library requirement to have injectable dependencies in a form of Option<Rc<RefCell<Trait>>>
    /// Sample implementation of `Trait3` depending on one `Trait1` and two
    /// separately named `Trait2` instances.
    struct Impl3 {
        t2_1: Option<Rc<RefCell<Trait2>>>,
        t2_2: Option<Rc<RefCell<Trait2>>>,
        t1_1: Option<Rc<RefCell<Trait1>>>
    }
    impl Impl3 {
        /// Exercises all three injected dependencies.
        pub fn oo(&mut self) {
            // `as_ref` + `borrow` suffice because `do1`/`do2` take `&self`.
            // The previous `borrow_mut` on both `t2_*` slots would panic if the
            // pool injected the *same* untagged `Trait2` instance into both:
            // the two exclusive RefCell borrows are alive simultaneously until
            // the end of the `println!` statement.
            let b1 = self.t2_1.as_ref().expect("Impl3: `t2_1` dependency was not injected");
            let b2 = self.t2_2.as_ref().expect("Impl3: `t2_2` dependency was not injected");
            let b3 = self.t1_1.as_ref().expect("Impl3: `t1_1` dependency was not injected");
            println!("oo from Impl3: \n1: {}\n2: {}",
                b1.borrow().do2(),
                b2.borrow().do2()
            );
            b3.borrow().do1();
        }
        pub fn new() -> Impl3 {
            Impl3 {
                t1_1: None,
                t2_1: None,
                t2_2: None,
            }
        }
    }
    impl Trait3 for Box<Impl3> {
        fn do3(&mut self) {
            self.oo();
        }
    }
    impl Base for Box<Impl3> {
        fn init(&mut self) {
            self.oo();
        }
    }
    //`Impl3` depends on `Trait1` (tagged `t1_1`) and two different instances of `Trait2`.
    indep_reqs_sync!{Impl3, [Trait1: [t1_1], Trait2: [t2_1,t2_2]]}
    //The struct implements `Trait3` (plus `Base`).
    indep_impls_sync!{Impl3, Base, [Trait3]}
    //Generates the default `new_dep()` constructor returning Box<Dependency>.
    indep_default_new_sync!{Impl3}
}
use self::t3::Trait3;
use self::t2::Trait2;
use self::t1::Trait1;
use self::base::Base;
//Initialize all the DI classes and traits The syntax is {Base_Trait, Trait1, Trait2,... },
//where Trait# is a trait which needs to be DI-enabled.
indep_pool_sync!{Base, Trait1,Trait2,Trait3}
fn main() | {
//`Pool` is a structure created by `indep_pool_sync` macro.
let mut pool = Pool::new();
let t1 = i1::new_dep();
let t2 = i2::new_dep();
let t3 = i3::new_dep();
//Here we mark this struct with a special tag so it will be injected only to similarly named member of a dependent struct.
pool.add_tagged(t1, vec!["t1_1".to_string()]);
//Add DI dependency with no tag, which means it will be injected to any struct that has a dependency of the corresponding trait.
pool.add(t2);
//Same here.
pool.add(t3);
//`stat()` is a simple utility method of a `Pool` that shows its content name-wise.
println!("Pool stat: {}", pool.stat());
} | identifier_body |
|
direction.rs | use specs::{ReadStorage, System, WriteStorage};
use crate::combat::components::intent::XAxis;
use crate::combat::components::movement::get_distance;
use crate::combat::components::{Action, AiState, Command, Facing, Intent, Position, State};
pub struct PlayerDirection;
impl<'a> System<'a> for PlayerDirection {
    type SystemData = (ReadStorage<'a, Intent>, WriteStorage<'a, State>);

    /// Faces each player-controlled entity toward its horizontal move input.
    fn run(&mut self, (intents, mut states): Self::SystemData) {
        use specs::Join;
        for (intent, state) in (&intents, &mut states).join() {
            // Facing may only change while the entity is idle or moving.
            match state.action {
                Action::Idle | Action::Move { .. } => {}
                _ => continue,
            }
            if let Command::Move { x, .. } = intent.command {
                match x {
                    XAxis::Right => state.direction = Facing::Right,
                    XAxis::Left => state.direction = Facing::Left,
                    _ => {}
                }
            }
        }
    }
}
pub struct AiDirection;
impl<'a> System<'a> for AiDirection {
type SystemData = (
ReadStorage<'a, Intent>,
ReadStorage<'a, AiState>,
ReadStorage<'a, Position>,
WriteStorage<'a, State>,
);
fn run(&mut self, (intent, ai_state, position_storage, mut state): Self::SystemData) | }
_ => (),
}
}
}
}
| {
use specs::Join;
for (intent, ai_state, position, state) in
(&intent, &ai_state, &position_storage, &mut state).join()
{
match state.action {
Action::Idle | Action::Move { .. } => {
if let Command::Move { .. } = intent.command {
let target_position: Option<&Position> =
ai_state.target.and_then(|t| position_storage.get(t));
if let Some(target_position) = target_position {
let delta = get_distance(position, target_position);
if delta.x < 0 {
state.direction = Facing::Right;
} else {
state.direction = Facing::Left;
}
}
} | identifier_body |
direction.rs | use specs::{ReadStorage, System, WriteStorage};
use crate::combat::components::intent::XAxis;
use crate::combat::components::movement::get_distance;
use crate::combat::components::{Action, AiState, Command, Facing, Intent, Position, State};
pub struct PlayerDirection;
impl<'a> System<'a> for PlayerDirection {
type SystemData = (ReadStorage<'a, Intent>, WriteStorage<'a, State>);
fn run(&mut self, (intent, mut state): Self::SystemData) {
use specs::Join;
for (intent, state) in (&intent, &mut state).join() {
match state.action {
Action::Idle | Action::Move {.. } => {
if let Command::Move { x,.. } = intent.command {
match x {
XAxis::Right => state.direction = Facing::Right,
XAxis::Left => state.direction = Facing::Left,
_ => (),
}
}
}
_ => (),
} |
impl<'a> System<'a> for AiDirection {
type SystemData = (
ReadStorage<'a, Intent>,
ReadStorage<'a, AiState>,
ReadStorage<'a, Position>,
WriteStorage<'a, State>,
);
fn run(&mut self, (intent, ai_state, position_storage, mut state): Self::SystemData) {
use specs::Join;
for (intent, ai_state, position, state) in
(&intent, &ai_state, &position_storage, &mut state).join()
{
match state.action {
Action::Idle | Action::Move {.. } => {
if let Command::Move {.. } = intent.command {
let target_position: Option<&Position> =
ai_state.target.and_then(|t| position_storage.get(t));
if let Some(target_position) = target_position {
let delta = get_distance(position, target_position);
if delta.x < 0 {
state.direction = Facing::Right;
} else {
state.direction = Facing::Left;
}
}
}
}
_ => (),
}
}
}
} | }
}
}
pub struct AiDirection; | random_line_split |
direction.rs | use specs::{ReadStorage, System, WriteStorage};
use crate::combat::components::intent::XAxis;
use crate::combat::components::movement::get_distance;
use crate::combat::components::{Action, AiState, Command, Facing, Intent, Position, State};
pub struct PlayerDirection;
impl<'a> System<'a> for PlayerDirection {
    type SystemData = (ReadStorage<'a, Intent>, WriteStorage<'a, State>);

    /// Faces each player-controlled entity toward its horizontal move input.
    fn run(&mut self, (intents, mut states): Self::SystemData) {
        use specs::Join;
        for (intent, state) in (&intents, &mut states).join() {
            // Facing may only change while the entity is idle or moving.
            match state.action {
                Action::Idle | Action::Move { .. } => {}
                _ => continue,
            }
            if let Command::Move { x, .. } = intent.command {
                match x {
                    XAxis::Right => state.direction = Facing::Right,
                    XAxis::Left => state.direction = Facing::Left,
                    _ => {}
                }
            }
        }
    }
}
pub struct | ;
impl<'a> System<'a> for AiDirection {
    type SystemData = (
        ReadStorage<'a, Intent>,
        ReadStorage<'a, AiState>,
        ReadStorage<'a, Position>,
        WriteStorage<'a, State>,
    );

    /// Turns each AI-controlled entity to face its current target while moving.
    fn run(&mut self, (intents, ai_states, positions, mut states): Self::SystemData) {
        use specs::Join;
        for (intent, ai_state, position, state) in
            (&intents, &ai_states, &positions, &mut states).join()
        {
            // Facing may only change while the entity is idle or moving,
            // and only while a move is actually intended.
            match state.action {
                Action::Idle | Action::Move { .. } => {}
                _ => continue,
            }
            if let Command::Move { .. } = intent.command {
                // Entities without a target (or with a target lacking a
                // Position) keep their current facing.
                let target_position = ai_state.target.and_then(|t| positions.get(t));
                if let Some(target_position) = target_position {
                    let delta = get_distance(position, target_position);
                    state.direction = if delta.x < 0 {
                        Facing::Right
                    } else {
                        Facing::Left
                    };
                }
            }
        }
    }
}
| AiDirection | identifier_name |
formatter.rs | use ansi_term::Colour::Green;
use ansi_term::Colour::Yellow;
use app::machine::Machine;
/// Blank spacer line rendered above and below the machine table.
fn get_empty_line() -> String {
    String::new()
}
/// Builds the yellow-painted column header row; the 10-char centered column
/// widths must match `get_separator` and `get_machine_line`.
fn get_header() -> String {
    let header = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}",
                         "Number",
                         "Name",
                         "State",
                         "Path");
    // `.to_string()` via Display instead of `format!("{}", ...)`.
    Yellow.paint(header).to_string()
}
fn get_machine_line(machine: &Machine) -> String |
/// Builds the yellow-painted separator row between the header and the
/// machine rows; column widths match `get_header`.
fn get_separator() -> String {
    let separator = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}",
                            "----------",
                            "----------",
                            "----------",
                            "----------");
    // `.to_string()` via Display instead of `format!("{}", ...)`.
    Yellow.paint(separator).to_string()
}
pub fn format(machines: &[Machine]) -> String {
let mut lines = Vec::new();
lines.push(get_empty_line());
lines.push(get_header());
lines.push(get_separator());
for machine in machines {
lines.push(get_machine_line(machine));
}
lines.push(get_empty_line());
lines.join("\n")
}
| {
let line = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}",
machine.get_number(),
machine.get_name(),
machine.get_state(),
machine.get_path());
format!("{}", Green.paint(line))
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.