file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
error.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
#[cfg(feature = "js_backtrace")]
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods;
use crate::dom::bindings::codegen::PrototypeList::proto_id_to_name;
use crate::dom::bindings::conversions::root_from_object;
use crate::dom::bindings::conversions::{
ConversionResult, FromJSValConvertible, ToJSValConvertible,
};
use crate::dom::bindings::str::USVString;
use crate::dom::domexception::{DOMErrorName, DOMException};
use crate::dom::globalscope::GlobalScope;
#[cfg(feature = "js_backtrace")]
use backtrace::Backtrace;
use js::error::{throw_range_error, throw_type_error};
use js::jsapi::JSContext;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_IsExceptionPending;
use js::jsval::UndefinedValue;
use js::rust::wrappers::JS_ErrorFromException;
use js::rust::wrappers::JS_GetPendingException;
use js::rust::wrappers::JS_SetPendingException;
use js::rust::HandleObject;
use js::rust::MutableHandleValue;
use libc::c_uint;
use std::slice::from_raw_parts;
#[cfg(feature = "js_backtrace")]
thread_local! {
/// An optional stringified JS backtrace and stringified native backtrace from
/// the last DOM exception that was reported.
static LAST_EXCEPTION_BACKTRACE: DomRefCell<Option<(Option<String>, String)>> = DomRefCell::new(None);
}
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// InvalidModificationError DOMException
InvalidModification,
/// NotReadableError DOMException
NotReadable,
/// TypeError JavaScript Error
Type(String),
/// RangeError JavaScript Error
Range(String),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub unsafe fn throw_dom_exception(cx: *mut JSContext, global: &GlobalScope, result: Error) {
#[cfg(feature = "js_backtrace")]
{
capture_stack!(in(cx) let stack);
let js_stack = stack.and_then(|s| s.as_string(None));
let rust_stack = Backtrace::new();
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
*backtrace.borrow_mut() = Some((js_stack, format!("{:?}", rust_stack)));
});
}
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::InvalidModification => DOMErrorName::InvalidModificationError,
Error::NotReadable => DOMErrorName::NotReadableError,
Error::Type(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(JS_IsExceptionPending(cx));
return;
},
};
assert!(!JS_IsExceptionPending(cx));
let exception = DOMException::new(global, code);
rooted!(in(cx) let mut thrown = UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
JS_SetPendingException(cx, thrown.handle());
}
/// A struct encapsulating information about a runtime script error.
pub struct ErrorInfo {
/// The error message.
pub message: String,
/// The file name.
pub filename: String,
/// The line number.
pub lineno: c_uint,
/// The column number.
pub column: c_uint,
}
impl ErrorInfo {
unsafe fn from_native_error(cx: *mut JSContext, object: HandleObject) -> Option<ErrorInfo> {
let report = JS_ErrorFromException(cx, object);
if report.is_null() {
return None;
}
let filename = {
let filename = (*report)._base.filename as *const u8;
if !filename.is_null() {
let length = (0..).find(|idx| *filename.offset(*idx) == 0).unwrap();
let filename = from_raw_parts(filename, length as usize);
String::from_utf8_lossy(filename).into_owned()
} else {
"none".to_string()
}
};
let lineno = (*report)._base.lineno;
let column = (*report)._base.column;
let message = {
let message = (*report)._base.message_.data_ as *const u8;
let length = (0..).find(|idx| *message.offset(*idx) == 0).unwrap();
let message = from_raw_parts(message, length as usize);
String::from_utf8_lossy(message).into_owned()
};
Some(ErrorInfo {
filename: filename,
message: message,
lineno: lineno,
column: column,
})
}
fn from_dom_exception(object: HandleObject) -> Option<ErrorInfo> {
let exception = match root_from_object::<DOMException>(object.get()) {
Ok(exception) => exception,
Err(_) => return None,
};
Some(ErrorInfo {
filename: "".to_string(),
message: exception.Stringifier().into(),
lineno: 0,
column: 0,
})
}
}
/// Report a pending exception, thereby clearing it.
///
/// The `dispatch_event` argument is temporary and non-standard; passing false
/// prevents dispatching the `error` event.
pub unsafe fn report_pending_exception(cx: *mut JSContext, dispatch_event: bool) {
if !JS_IsExceptionPending(cx) {
return;
}
rooted!(in(cx) let mut value = UndefinedValue());
if !JS_GetPendingException(cx, value.handle_mut()) {
JS_ClearPendingException(cx);
error!("Uncaught exception: JS_GetPendingException failed");
return;
}
JS_ClearPendingException(cx);
let error_info = if value.is_object() {
rooted!(in(cx) let object = value.to_object());
ErrorInfo::from_native_error(cx, object.handle())
.or_else(|| ErrorInfo::from_dom_exception(object.handle()))
.unwrap_or_else(|| ErrorInfo {
message: format!("uncaught exception: unknown (can't convert to string)"),
filename: String::new(),
lineno: 0,
column: 0,
})
} else {
match USVString::from_jsval(cx, value.handle(), ()) {
Ok(ConversionResult::Success(USVString(string))) => ErrorInfo {
message: format!("uncaught exception: {}", string),
filename: String::new(),
lineno: 0,
column: 0,
},
_ => {
panic!("Uncaught exception: failed to stringify primitive");
},
}
};
error!(
"Error at {}:{}:{} {}",
error_info.filename, error_info.lineno, error_info.column, error_info.message
);
#[cfg(feature = "js_backtrace")]
{
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
if let Some((js_backtrace, rust_backtrace)) = backtrace.borrow_mut().take() {
if let Some(stack) = js_backtrace {
eprintln!("JS backtrace:\n{}", stack);
}
eprintln!("Rust backtrace:\n{}", rust_backtrace);
}
});
}
if dispatch_event {
GlobalScope::from_context(cx).report_an_error(error_info, value.handle());
}
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub unsafe fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) |
impl Error {
/// Convert this error value to a JS value, consuming it in the process.
pub unsafe fn to_jsval(
self,
cx: *mut JSContext,
global: &GlobalScope,
rval: MutableHandleValue,
) {
assert!(!JS_IsExceptionPending(cx));
throw_dom_exception(cx, global, self);
assert!(JS_IsExceptionPending(cx));
assert!(JS_GetPendingException(cx, rval));
JS_ClearPendingException(cx);
}
}
| {
debug_assert!(!JS_IsExceptionPending(cx));
let error = format!(
"\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id)
);
throw_type_error(cx, &error);
} | identifier_body |
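A note on the row above: the generated bindings return `Fallible<T>`/`ErrorResult` so a native method can hand back either a value or one of the `Error` variants, which `throw_dom_exception` then turns into a pending JS exception. A minimal sketch of that shape — the helper below and its bounds check are invented for illustration; only `Error::IndexSize` and `Fallible` come from the file:

```rust
use crate::dom::bindings::error::{Error, Fallible};

// Hypothetical DOM-style getter: an out-of-range index maps to Error::IndexSize,
// which throw_dom_exception would surface as an IndexSizeError DOMException.
fn item_checked(values: &[u32], index: u32) -> Fallible<u32> {
    values.get(index as usize).copied().ok_or(Error::IndexSize)
}
```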
controller.rs | pub struct Controller<T: PartialEq> {
pub dpad: DPad<T>,
}
impl<T: PartialEq> Controller<T> {
pub fn new(up: T, left: T, down: T, right: T) -> Controller<T> {
Controller {
dpad: DPad::new(up, left, down, right),
}
}
pub fn down(&mut self, key: &T) {
self.dpad.down(key);
}
pub fn up(&mut self, key: &T) {
self.dpad.up(key);
}
}
pub struct DPad<T: PartialEq> {
bindings: [T; 4],
state: [bool; 4],
}
impl<T: PartialEq> DPad<T> {
pub fn new(up: T, left: T, down: T, right: T) -> DPad<T> {
DPad {
bindings: [up, left, down, right],
state: [false; 4],
}
}
pub fn down(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = true;
}
}
}
pub fn up(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = false;
}
}
}
pub fn flatten(&self) -> [f64; 2] {
let x = ((self.state[3] as i8) - (self.state[1] as i8)) as f64;
let y = ((self.state[2] as i8) - (self.state[0] as i8)) as f64;
let z = if x == 0.0 && y == 0.0 | else {
(x.powi(2) + y.powi(2)).sqrt()
};
[x / z, y / z]
}
}
| {
1.0
} | conditional_block |
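The `conditional_block` hole in the row above is the `1.0` divisor that `flatten` falls back to when no direction is held, avoiding a division by zero; otherwise the D-pad vector is normalised to unit length. A small usage sketch, with `char` bindings chosen arbitrarily:

```rust
// Diagonal input: "right" and "down" both pressed, so x = 1.0, y = 1.0,
// and flatten() divides both components by sqrt(2).
let mut dpad = DPad::new('w', 'a', 's', 'd');
dpad.down(&'d');
dpad.down(&'s');
let v = dpad.flatten();
let inv_sqrt2 = 1.0 / 2f64.sqrt();
assert!((v[0] - inv_sqrt2).abs() < 1e-12);
assert!((v[1] - inv_sqrt2).abs() < 1e-12);
```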
controller.rs | pub struct Controller<T: PartialEq> {
pub dpad: DPad<T>,
}
impl<T: PartialEq> Controller<T> {
pub fn new(up: T, left: T, down: T, right: T) -> Controller<T> {
Controller {
dpad: DPad::new(up, left, down, right),
}
}
pub fn down(&mut self, key: &T) {
self.dpad.down(key);
}
pub fn up(&mut self, key: &T) {
self.dpad.up(key);
}
}
pub struct DPad<T: PartialEq> {
bindings: [T; 4],
state: [bool; 4],
}
impl<T: PartialEq> DPad<T> {
pub fn new(up: T, left: T, down: T, right: T) -> DPad<T> {
DPad {
bindings: [up, left, down, right], |
pub fn down(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = true;
}
}
}
pub fn up(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = false;
}
}
}
pub fn flatten(&self) -> [f64; 2] {
let x = ((self.state[3] as i8) - (self.state[1] as i8)) as f64;
let y = ((self.state[2] as i8) - (self.state[0] as i8)) as f64;
let z = if x == 0.0 && y == 0.0 {
1.0
} else {
(x.powi(2) + y.powi(2)).sqrt()
};
[x / z, y / z]
}
} | state: [false; 4],
}
} | random_line_split |
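Same `controller.rs` source as the previous row, split at a different point. Since `Controller` only forwards to its `dpad`, pressing and releasing a key moves the flattened axis away from and back to rest; a quick check of that, again with arbitrary `char` bindings:

```rust
let mut pad = Controller::new('w', 'a', 's', 'd');
pad.down(&'w');                              // press "up"
assert_eq!(pad.dpad.flatten(), [0.0, -1.0]); // up is the negative y direction here
pad.up(&'w');                                // release it
assert_eq!(pad.dpad.flatten(), [0.0, 0.0]);  // divisor clamps to 1.0, so no NaN at rest
```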
controller.rs | pub struct Controller<T: PartialEq> {
pub dpad: DPad<T>,
}
impl<T: PartialEq> Controller<T> {
pub fn | (up: T, left: T, down: T, right: T) -> Controller<T> {
Controller {
dpad: DPad::new(up, left, down, right),
}
}
pub fn down(&mut self, key: &T) {
self.dpad.down(key);
}
pub fn up(&mut self, key: &T) {
self.dpad.up(key);
}
}
pub struct DPad<T: PartialEq> {
bindings: [T; 4],
state: [bool; 4],
}
impl<T: PartialEq> DPad<T> {
pub fn new(up: T, left: T, down: T, right: T) -> DPad<T> {
DPad {
bindings: [up, left, down, right],
state: [false; 4],
}
}
pub fn down(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = true;
}
}
}
pub fn up(&mut self, key: &T) {
for (idx, bind) in self.bindings.iter().enumerate() {
if key == bind {
self.state[idx] = false;
}
}
}
pub fn flatten(&self) -> [f64; 2] {
let x = ((self.state[3] as i8) - (self.state[1] as i8)) as f64;
let y = ((self.state[2] as i8) - (self.state[0] as i8)) as f64;
let z = if x == 0.0 && y == 0.0 {
1.0
} else {
(x.powi(2) + y.powi(2)).sqrt()
};
[x / z, y / z]
}
}
| new | identifier_name |
example.rs | use std::cmp::Ordering::Equal;
use std::collections::HashMap;
enum GameResult {
Win,
Draw,
Loss
}
struct TeamResult {
wins: u32,
draws: u32,
losses: u32,
}
impl TeamResult {
fn new() -> TeamResult {
TeamResult { wins: 0, draws: 0, losses: 0 }
}
fn add_game_result(&mut self, result: GameResult) {
match result {
GameResult::Win => self.wins += 1,
GameResult::Draw => self.draws += 1,
GameResult::Loss => self.losses += 1,
}
}
}
pub fn | (input: &str) -> String {
let mut results: HashMap<String, TeamResult> = HashMap::new();
for line in input.to_string().lines() {
let parts: Vec<&str> = line.trim_right().split(';').collect();
if parts.len() != 3 { continue; }
let team1 = parts[0];
let team2 = parts[1];
let outcome = parts[2];
match outcome {
"win" => {
add_game_result(&mut results, team1.to_string(), GameResult::Win);
add_game_result(&mut results, team2.to_string(), GameResult::Loss);
},
"draw" => {
add_game_result(&mut results, team1.to_string(), GameResult::Draw);
add_game_result(&mut results, team2.to_string(), GameResult::Draw);
},
"loss" => {
add_game_result(&mut results, team1.to_string(), GameResult::Loss);
add_game_result(&mut results, team2.to_string(), GameResult::Win);
},
_ => () // Ignore bad lines
}
}
write_tally(&results)
}
fn write_tally(results: &HashMap<String, TeamResult>) -> String {
let mut v: Vec<_> = results.iter().map(|(team, r)| {
let games = r.wins + r.draws + r.losses;
let points = r.wins * 3 + r.draws;
(team, games, r, points)
}).collect();
// Sort by points descending, then name A-Z.
v.sort_by(|a, b|
match a.3.cmp(&(b.3)).reverse() {
Equal => a.0.cmp(&(b.0)),
other => other
});
let mut lines = vec![format!("{:30} | MP | W | D | L | P", "Team")];
lines.extend(v.iter().map(|&(ref team, games, r, points)| {
format!("{:30} | {:2} | {:2} | {:2} | {:2} | {:2}",
team, games, r.wins, r.draws, r.losses, points)
}));
lines.join("\n")
}
fn add_game_result(results: &mut HashMap<String, TeamResult>, team: String, result: GameResult) {
results.entry(team).or_insert(TeamResult::new()).add_game_result(result);
}
| tally | identifier_name |
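The identifier hidden in the row above is `tally`, which parses `home;away;outcome` lines and renders a standings table (3 points for a win, 1 for a draw, malformed lines skipped). An illustrative call, with invented team names:

```rust
// One game: the first-named team won, so it earns 3 points and sorts first.
let input = "Devastating Donkeys;Courageous Californians;win";
let table = tally(input);
let lines: Vec<&str> = table.lines().collect();
assert_eq!(lines.len(), 3);                            // header plus one row per team
assert!(lines[1].starts_with("Devastating Donkeys"));  // higher points come first
assert!(lines[1].ends_with("3"));                      // 1 MP, 1 W, 0 D, 0 L, 3 P
```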
example.rs | use std::cmp::Ordering::Equal;
use std::collections::HashMap;
enum GameResult {
Win,
Draw,
Loss
}
struct TeamResult {
wins: u32,
draws: u32,
losses: u32,
}
impl TeamResult {
fn new() -> TeamResult {
TeamResult { wins: 0, draws: 0, losses: 0 }
}
fn add_game_result(&mut self, result: GameResult) {
match result {
GameResult::Win => self.wins += 1,
GameResult::Draw => self.draws += 1,
GameResult::Loss => self.losses += 1,
}
}
} | let mut results: HashMap<String, TeamResult> = HashMap::new();
for line in input.to_string().lines() {
let parts: Vec<&str> = line.trim_right().split(';').collect();
if parts.len() != 3 { continue; }
let team1 = parts[0];
let team2 = parts[1];
let outcome = parts[2];
match outcome {
"win" => {
add_game_result(&mut results, team1.to_string(), GameResult::Win);
add_game_result(&mut results, team2.to_string(), GameResult::Loss);
},
"draw" => {
add_game_result(&mut results, team1.to_string(), GameResult::Draw);
add_game_result(&mut results, team2.to_string(), GameResult::Draw);
},
"loss" => {
add_game_result(&mut results, team1.to_string(), GameResult::Loss);
add_game_result(&mut results, team2.to_string(), GameResult::Win);
},
_ => () // Ignore bad lines
}
}
write_tally(&results)
}
fn write_tally(results: &HashMap<String, TeamResult>) -> String {
let mut v: Vec<_> = results.iter().map(|(team, r)| {
let games = r.wins + r.draws + r.losses;
let points = r.wins * 3 + r.draws;
(team, games, r, points)
}).collect();
// Sort by points descending, then name A-Z.
v.sort_by(|a, b|
match a.3.cmp(&(b.3)).reverse() {
Equal => a.0.cmp(&(b.0)),
other => other
});
let mut lines = vec![format!("{:30} | MP | W | D | L | P", "Team")];
lines.extend(v.iter().map(|&(ref team, games, r, points)| {
format!("{:30} | {:2} | {:2} | {:2} | {:2} | {:2}",
team, games, r.wins, r.draws, r.losses, points)
}));
lines.join("\n")
}
fn add_game_result(results: &mut HashMap<String, TeamResult>, team: String, result: GameResult) {
results.entry(team).or_insert(TeamResult::new()).add_game_result(result);
} |
pub fn tally(input: &str) -> String { | random_line_split |
main.rs | use bitmap::Image;
// see read_ppm implementation in the bitmap library
pub fn main() {
// read a PPM image, which was produced by the write-a-ppm-file task
let image = Image::read_ppm("./test_image.ppm").unwrap();
println!("Read using nom parsing:");
println!("Format: {:?}", image.format); | mod tests {
extern crate rand;
use bitmap::{Color, Image};
use std::env;
#[test]
fn read_ppm() {
let mut image = Image::new(2, 1);
image[(0, 0)] = Color {
red: 255,
green: 0,
blue: 0,
};
image[(1, 0)] = Color {
red: 0,
green: 255,
blue: 0,
};
let fname = format!(
"{}/test-{}.ppm",
env::temp_dir().to_str().unwrap(),
self::rand::random::<i32>(),
);
image.write_ppm(&fname).unwrap();
image = Image::read_ppm(&fname).unwrap();
assert_eq!(image.width, 2);
assert_eq!(image.height, 1);
assert_eq!(
image.data,
vec![
Color {
red: 255,
green: 0,
blue: 0
},
Color {
red: 0,
green: 255,
blue: 0
}
]
)
}
} | println!("Dimensions: {} x {}", image.height, image.width);
}
#[cfg(test)] | random_line_split |
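The round-trip test above writes a two-pixel image (one red, one green pixel) with the project's `bitmap` helpers and reads it back. For reference, that image in the plain-text `P3` flavour of PPM is shown below; whether this crate's `write_ppm` emits `P3` or binary `P6` is not visible in the row, so treat it purely as a format illustration:

```text
P3
2 1
255
255 0 0   0 255 0
```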
main.rs | use bitmap::Image;
// see read_ppm implementation in the bitmap library
pub fn main() |
#[cfg(test)]
mod tests {
extern crate rand;
use bitmap::{Color, Image};
use std::env;
#[test]
fn read_ppm() {
let mut image = Image::new(2, 1);
image[(0, 0)] = Color {
red: 255,
green: 0,
blue: 0,
};
image[(1, 0)] = Color {
red: 0,
green: 255,
blue: 0,
};
let fname = format!(
"{}/test-{}.ppm",
env::temp_dir().to_str().unwrap(),
self::rand::random::<i32>(),
);
image.write_ppm(&fname).unwrap();
image = Image::read_ppm(&fname).unwrap();
assert_eq!(image.width, 2);
assert_eq!(image.height, 1);
assert_eq!(
image.data,
vec![
Color {
red: 255,
green: 0,
blue: 0
},
Color {
red: 0,
green: 255,
blue: 0
}
]
)
}
}
| {
// read a PPM image, which was produced by the write-a-ppm-file task
let image = Image::read_ppm("./test_image.ppm").unwrap();
println!("Read using nom parsing:");
println!("Format: {:?}", image.format);
println!("Dimensions: {} x {}", image.height, image.width);
} | identifier_body |
main.rs | use bitmap::Image;
// see read_ppm implementation in the bitmap library
pub fn main() {
// read a PPM image, which was produced by the write-a-ppm-file task
let image = Image::read_ppm("./test_image.ppm").unwrap();
println!("Read using nom parsing:");
println!("Format: {:?}", image.format);
println!("Dimensions: {} x {}", image.height, image.width);
}
#[cfg(test)]
mod tests {
extern crate rand;
use bitmap::{Color, Image};
use std::env;
#[test]
fn | () {
let mut image = Image::new(2, 1);
image[(0, 0)] = Color {
red: 255,
green: 0,
blue: 0,
};
image[(1, 0)] = Color {
red: 0,
green: 255,
blue: 0,
};
let fname = format!(
"{}/test-{}.ppm",
env::temp_dir().to_str().unwrap(),
self::rand::random::<i32>(),
);
image.write_ppm(&fname).unwrap();
image = Image::read_ppm(&fname).unwrap();
assert_eq!(image.width, 2);
assert_eq!(image.height, 1);
assert_eq!(
image.data,
vec![
Color {
red: 255,
green: 0,
blue: 0
},
Color {
red: 0,
green: 255,
blue: 0
}
]
)
}
}
| read_ppm | identifier_name |
lib.rs | pub mod one;
pub mod two;
pub mod ffigen;
//Integer marshaling
#[no_mangle]
pub extern fn test_u8(p: u8) -> u8 {
p
}
#[no_mangle]
pub extern fn test_u16(p: u16) -> u16 {
p
}
#[no_mangle]
pub extern fn test_u32(p: u32) -> u32 {
p
}
#[no_mangle]
pub extern fn test_i8(p: i8) -> i8 {
p
}
#[no_mangle]
pub extern fn test_i16(p: i16) -> i16 {
p
}
#[no_mangle]
pub extern fn test_i32(p: i32) -> i32 {
p
}
//Float marshaling
#[no_mangle]
pub extern fn te | : f32) -> f32 {
p
}
#[no_mangle]
pub extern fn test_f64(p: f64) -> f64 {
p
}
//Boolean marshaling
#[no_mangle]
pub extern fn test_bool(p: bool) -> bool {
p == true
}
//String marshaling
#[no_mangle]
pub extern fn test_string(p: String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_string_ref(p: &String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_str_ref(p: &str) -> String {
p.to_string()
} | st_f32(p | identifier_name |
lib.rs | pub mod one;
pub mod two;
pub mod ffigen;
//Integer marshaling
#[no_mangle]
pub extern fn test_u8(p: u8) -> u8 {
p
}
#[no_mangle]
pub extern fn test_u16(p: u16) -> u16 {
| #[no_mangle]
pub extern fn test_u32(p: u32) -> u32 {
p
}
#[no_mangle]
pub extern fn test_i8(p: i8) -> i8 {
p
}
#[no_mangle]
pub extern fn test_i16(p: i16) -> i16 {
p
}
#[no_mangle]
pub extern fn test_i32(p: i32) -> i32 {
p
}
//Float marshaling
#[no_mangle]
pub extern fn test_f32(p: f32) -> f32 {
p
}
#[no_mangle]
pub extern fn test_f64(p: f64) -> f64 {
p
}
//Boolean marshaling
#[no_mangle]
pub extern fn test_bool(p: bool) -> bool {
p == true
}
//String marshaling
#[no_mangle]
pub extern fn test_string(p: String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_string_ref(p: &String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_str_ref(p: &str) -> String {
p.to_string()
} | p
}
| identifier_body |
lib.rs | pub mod one;
pub mod two;
pub mod ffigen;
//Integer marshaling
#[no_mangle]
pub extern fn test_u8(p: u8) -> u8 {
p
}
#[no_mangle]
pub extern fn test_u16(p: u16) -> u16 {
p
}
#[no_mangle]
pub extern fn test_u32(p: u32) -> u32 {
p
}
#[no_mangle]
pub extern fn test_i8(p: i8) -> i8 {
p
}
#[no_mangle]
pub extern fn test_i16(p: i16) -> i16 {
p
}
#[no_mangle]
pub extern fn test_i32(p: i32) -> i32 {
p
}
//Float marshaling
#[no_mangle]
pub extern fn test_f32(p: f32) -> f32 {
p
}
#[no_mangle]
pub extern fn test_f64(p: f64) -> f64 {
p
}
//Boolean marshaling
#[no_mangle]
pub extern fn test_bool(p: bool) -> bool {
p == true
}
//String marshaling
#[no_mangle]
pub extern fn test_string(p: String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_string_ref(p: &String) -> String {
p.clone()
}
#[no_mangle]
pub extern fn test_str_ref(p: &str) -> String { | p.to_string()
} | random_line_split |
|
test_poll.rs | use nix::{
Error,
errno::Errno,
poll::{PollFlags, poll, PollFd},
unistd::{write, pipe}
};
macro_rules! loop_while_eintr {
($poll_expr: expr) => {
loop {
match $poll_expr {
Ok(nfds) => break nfds,
Err(Error::Sys(Errno::EINTR)) => (),
Err(e) => panic!(e)
}
}
}
}
#[test]
fn test_poll() {
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let nfds = loop_while_eintr!(poll(&mut fds, 100));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
| write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = poll(&mut fds, 100).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
}
// ppoll(2) is the same as poll except for how it handles timeouts and signals.
// Repeating the test for poll(2) should be sufficient to check that our
// bindings are correct.
#[cfg(any(target_os = "android",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "linux"))]
#[test]
fn test_ppoll() {
use nix::poll::ppoll;
use nix::sys::signal::SigSet;
use nix::sys::time::{TimeSpec, TimeValLike};
let timeout = TimeSpec::milliseconds(1);
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let sigset = SigSet::empty();
let nfds = loop_while_eintr!(ppoll(&mut fds, Some(timeout), sigset));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = ppoll(&mut fds, Some(timeout), SigSet::empty()).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
} | random_line_split |
|
test_poll.rs | use nix::{
Error,
errno::Errno,
poll::{PollFlags, poll, PollFd},
unistd::{write, pipe}
};
macro_rules! loop_while_eintr {
($poll_expr: expr) => {
loop {
match $poll_expr {
Ok(nfds) => break nfds,
Err(Error::Sys(Errno::EINTR)) => (),
Err(e) => panic!(e)
}
}
}
}
#[test]
fn | () {
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let nfds = loop_while_eintr!(poll(&mut fds, 100));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = poll(&mut fds, 100).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
}
// ppoll(2) is the same as poll except for how it handles timeouts and signals.
// Repeating the test for poll(2) should be sufficient to check that our
// bindings are correct.
#[cfg(any(target_os = "android",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "linux"))]
#[test]
fn test_ppoll() {
use nix::poll::ppoll;
use nix::sys::signal::SigSet;
use nix::sys::time::{TimeSpec, TimeValLike};
let timeout = TimeSpec::milliseconds(1);
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let sigset = SigSet::empty();
let nfds = loop_while_eintr!(ppoll(&mut fds, Some(timeout), sigset));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = ppoll(&mut fds, Some(timeout), SigSet::empty()).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
}
| test_poll | identifier_name |
test_poll.rs | use nix::{
Error,
errno::Errno,
poll::{PollFlags, poll, PollFd},
unistd::{write, pipe}
};
macro_rules! loop_while_eintr {
($poll_expr: expr) => {
loop {
match $poll_expr {
Ok(nfds) => break nfds,
Err(Error::Sys(Errno::EINTR)) => (),
Err(e) => panic!(e)
}
}
}
}
#[test]
fn test_poll() |
// ppoll(2) is the same as poll except for how it handles timeouts and signals.
// Repeating the test for poll(2) should be sufficient to check that our
// bindings are correct.
#[cfg(any(target_os = "android",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "linux"))]
#[test]
fn test_ppoll() {
use nix::poll::ppoll;
use nix::sys::signal::SigSet;
use nix::sys::time::{TimeSpec, TimeValLike};
let timeout = TimeSpec::milliseconds(1);
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let sigset = SigSet::empty();
let nfds = loop_while_eintr!(ppoll(&mut fds, Some(timeout), sigset));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = ppoll(&mut fds, Some(timeout), SigSet::empty()).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
}
| {
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, PollFlags::POLLIN)];
// Poll an idle pipe. Should timeout
let nfds = loop_while_eintr!(poll(&mut fds, 100));
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(PollFlags::POLLIN));
write(w, b".").unwrap();
// Poll a readable pipe. Should return an event.
let nfds = poll(&mut fds, 100).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(PollFlags::POLLIN));
} | identifier_body |
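The `loop_while_eintr!` macro used throughout the `test_poll.rs` rows exists because `poll(2)` can fail with `EINTR` when a signal arrives before the timeout expires, and the tests want to retry rather than fail. For one call site the macro expands to roughly the following (no new behaviour, just the expansion with `poll(&mut fds, 100)` substituted in):

```rust
let nfds = loop {
    match poll(&mut fds, 100) {
        Ok(nfds) => break nfds,              // got a result: leave the loop with it
        Err(Error::Sys(Errno::EINTR)) => (), // interrupted by a signal: retry
        Err(e) => panic!(e),                 // any other error is a real failure
    }
};
```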
tag-align-shape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
//
// See issue #1535
tag a_tag {
a_tag(u64);
}
type t_rec = {
c8: u8,
t: a_tag
};
pub fn | () {
let x = {c8: 22u8, t: a_tag(44u64)};
let y = fmt!("%?", x);
debug!("y = %s", y);
assert!(y == "(22, a_tag(44))");
}
| main | identifier_name |
tag-align-shape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
//
// See issue #1535
tag a_tag {
a_tag(u64);
}
type t_rec = {
c8: u8,
t: a_tag
};
pub fn main() | {
let x = {c8: 22u8, t: a_tag(44u64)};
let y = fmt!("%?", x);
debug!("y = %s", y);
assert!(y == "(22, a_tag(44))");
} | identifier_body |
|
tag-align-shape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
//
// See issue #1535
tag a_tag {
a_tag(u64);
}
type t_rec = {
c8: u8,
t: a_tag
};
pub fn main() {
let x = {c8: 22u8, t: a_tag(44u64)};
let y = fmt!("%?", x); | debug!("y = %s", y);
assert!(y == "(22, a_tag(44))");
} | random_line_split |
|
callbacks.rs | use std::panic;
use std::ffi::CStr;
use printf::printf;
use alpm_sys::*;
use libc::{c_int, c_char, c_void, off_t};
use {LOG_CB, DOWNLOAD_CB, DLTOTAL_CB, FETCH_CB, EVENT_CB, DownloadResult};
use event::Event;
/// Function with C calling convention and required type signature to wrap our callback
pub unsafe extern "C" fn alpm_cb_log(level: alpm_loglevel_t,
fmt: *const c_char,
args: *const Struct_va_list) {
let out = printf(fmt, args as *mut c_void);
panic::catch_unwind(|| {
let mut cb = LOG_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(level.into(), out);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of download progress callbacks.
* @param filename the name of the file being downloaded
* @param xfered the number of transferred bytes
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_download(filename: *const c_char, xfered: off_t, total: off_t) {
let filename = CStr::from_ptr(filename).to_string_lossy();
let xfered = xfered as u64;
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DOWNLOAD_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(filename.as_ref(), xfered, total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of total download size callbacks.
* @param total the total number of bytes to transfer | */
pub unsafe extern "C" fn alpm_cb_totaldl(total: off_t) {
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DLTOTAL_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** A callback for downloading files
* @param url the URL of the file to be downloaded
* @param localpath the directory to which the file should be downloaded
* @param force whether to force an update, even if the file is the same
* @return 0 on success, 1 if the file exists and is identical, -1 on
* error.
*/
pub unsafe extern "C" fn alpm_cb_fetch(url: *const c_char,
localpath: *const c_char,
force: c_int) -> c_int
{
let url = CStr::from_ptr(url).to_string_lossy();
let localpath = CStr::from_ptr(localpath).to_string_lossy();
let force = force != 0;
panic::catch_unwind(|| {
let mut cb = FETCH_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
match cb(url.as_ref(), localpath.as_ref(), force) {
DownloadResult::Ok => 0,
DownloadResult::NotNeeded => 1,
DownloadResult::Err => -1,
}
} else {
-1
}
}).unwrap_or(-1) // set error code if we have panicked
}
/** Event callback */
pub unsafe extern "C" fn alpm_cb_event(evt: *const alpm_event_t) {
let evt = Event::new(evt);
panic::catch_unwind(|| {
let mut cb = EVENT_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(evt);
}
}).unwrap_or(())
} | random_line_split |
|
callbacks.rs | use std::panic;
use std::ffi::CStr;
use printf::printf;
use alpm_sys::*;
use libc::{c_int, c_char, c_void, off_t};
use {LOG_CB, DOWNLOAD_CB, DLTOTAL_CB, FETCH_CB, EVENT_CB, DownloadResult};
use event::Event;
/// Function with C calling convention and required type signature to wrap our callback
pub unsafe extern "C" fn alpm_cb_log(level: alpm_loglevel_t,
fmt: *const c_char,
args: *const Struct_va_list) {
let out = printf(fmt, args as *mut c_void);
panic::catch_unwind(|| {
let mut cb = LOG_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(level.into(), out);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of download progress callbacks.
* @param filename the name of the file being downloaded
* @param xfered the number of transferred bytes
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_download(filename: *const c_char, xfered: off_t, total: off_t) {
let filename = CStr::from_ptr(filename).to_string_lossy();
let xfered = xfered as u64;
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DOWNLOAD_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(filename.as_ref(), xfered, total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of total download size callbacks.
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn | (total: off_t) {
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DLTOTAL_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** A callback for downloading files
* @param url the URL of the file to be downloaded
* @param localpath the directory to which the file should be downloaded
* @param force whether to force an update, even if the file is the same
* @return 0 on success, 1 if the file exists and is identical, -1 on
* error.
*/
pub unsafe extern "C" fn alpm_cb_fetch(url: *const c_char,
localpath: *const c_char,
force: c_int) -> c_int
{
let url = CStr::from_ptr(url).to_string_lossy();
let localpath = CStr::from_ptr(localpath).to_string_lossy();
let force = force != 0;
panic::catch_unwind(|| {
let mut cb = FETCH_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
match cb(url.as_ref(), localpath.as_ref(), force) {
DownloadResult::Ok => 0,
DownloadResult::NotNeeded => 1,
DownloadResult::Err => -1,
}
} else {
-1
}
}).unwrap_or(-1) // set error code if we have panicked
}
/** Event callback */
pub unsafe extern "C" fn alpm_cb_event(evt: *const alpm_event_t) {
let evt = Event::new(evt);
panic::catch_unwind(|| {
let mut cb = EVENT_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(evt);
}
}).unwrap_or(())
}
| alpm_cb_totaldl | identifier_name |
callbacks.rs | use std::panic;
use std::ffi::CStr;
use printf::printf;
use alpm_sys::*;
use libc::{c_int, c_char, c_void, off_t};
use {LOG_CB, DOWNLOAD_CB, DLTOTAL_CB, FETCH_CB, EVENT_CB, DownloadResult};
use event::Event;
/// Function with C calling convention and required type signature to wrap our callback
pub unsafe extern "C" fn alpm_cb_log(level: alpm_loglevel_t,
fmt: *const c_char,
args: *const Struct_va_list) {
let out = printf(fmt, args as *mut c_void);
panic::catch_unwind(|| {
let mut cb = LOG_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(level.into(), out);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of download progress callbacks.
* @param filename the name of the file being downloaded
* @param xfered the number of transferred bytes
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_download(filename: *const c_char, xfered: off_t, total: off_t) {
let filename = CStr::from_ptr(filename).to_string_lossy();
let xfered = xfered as u64;
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DOWNLOAD_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(filename.as_ref(), xfered, total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of total download size callbacks.
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_totaldl(total: off_t) {
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DLTOTAL_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** A callback for downloading files
* @param url the URL of the file to be downloaded
* @param localpath the directory to which the file should be downloaded
* @param force whether to force an update, even if the file is the same
* @return 0 on success, 1 if the file exists and is identical, -1 on
* error.
*/
pub unsafe extern "C" fn alpm_cb_fetch(url: *const c_char,
localpath: *const c_char,
force: c_int) -> c_int
{
let url = CStr::from_ptr(url).to_string_lossy();
let localpath = CStr::from_ptr(localpath).to_string_lossy();
let force = force != 0;
panic::catch_unwind(|| {
let mut cb = FETCH_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
match cb(url.as_ref(), localpath.as_ref(), force) {
DownloadResult::Ok => 0,
DownloadResult::NotNeeded => 1,
DownloadResult::Err => -1,
}
} else {
-1
}
}).unwrap_or(-1) // set error code if we have panicked
}
/** Event callback */
pub unsafe extern "C" fn alpm_cb_event(evt: *const alpm_event_t) | {
let evt = Event::new(evt);
panic::catch_unwind(|| {
let mut cb = EVENT_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(evt);
}
}).unwrap_or(())
} | identifier_body |
|
callbacks.rs | use std::panic;
use std::ffi::CStr;
use printf::printf;
use alpm_sys::*;
use libc::{c_int, c_char, c_void, off_t};
use {LOG_CB, DOWNLOAD_CB, DLTOTAL_CB, FETCH_CB, EVENT_CB, DownloadResult};
use event::Event;
/// Function with C calling convention and required type signature to wrap our callback
pub unsafe extern "C" fn alpm_cb_log(level: alpm_loglevel_t,
fmt: *const c_char,
args: *const Struct_va_list) {
let out = printf(fmt, args as *mut c_void);
panic::catch_unwind(|| {
let mut cb = LOG_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(level.into(), out);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of download progress callbacks.
* @param filename the name of the file being downloaded
* @param xfered the number of transferred bytes
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_download(filename: *const c_char, xfered: off_t, total: off_t) {
let filename = CStr::from_ptr(filename).to_string_lossy();
let xfered = xfered as u64;
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DOWNLOAD_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(filename.as_ref(), xfered, total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of total download size callbacks.
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_totaldl(total: off_t) {
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DLTOTAL_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** A callback for downloading files
* @param url the URL of the file to be downloaded
* @param localpath the directory to which the file should be downloaded
* @param force whether to force an update, even if the file is the same
* @return 0 on success, 1 if the file exists and is identical, -1 on
* error.
*/
pub unsafe extern "C" fn alpm_cb_fetch(url: *const c_char,
localpath: *const c_char,
force: c_int) -> c_int
{
let url = CStr::from_ptr(url).to_string_lossy();
let localpath = CStr::from_ptr(localpath).to_string_lossy();
let force = force != 0;
panic::catch_unwind(|| {
let mut cb = FETCH_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
match cb(url.as_ref(), localpath.as_ref(), force) {
DownloadResult::Ok => 0,
DownloadResult::NotNeeded => 1,
DownloadResult::Err => -1,
}
} else |
}).unwrap_or(-1) // set error code if we have panicked
}
/** Event callback */
pub unsafe extern "C" fn alpm_cb_event(evt: *const alpm_event_t) {
let evt = Event::new(evt);
panic::catch_unwind(|| {
let mut cb = EVENT_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(evt);
}
}).unwrap_or(())
}
| {
-1
} | conditional_block |
entries.rs | use std::env;
use std::error::Error;
use std::fs::{self, File};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
use controllers::prelude::*;
use models::{queries, Tag};
use views;
use util;
use aqua_web::plug;
use aqua_web::mw::forms::{MultipartForm, SavedFile};
use aqua_web::mw::router::Router;
use glob::glob;
use image::{self, FilterType, ImageFormat, ImageResult};
use serde_json;
#[derive(Serialize)]
struct TagView {
tags: Vec<Tag>,
}
fn glob_for_category(category: &str, digest: &str) -> String {
// TODO: assert digest is really a digest
// TODO: assert category is really a category
PathBuf::from(env::var("CONTENT_STORE").unwrap())
.join(format!("{}{}", category, &digest[..2]))
.join(&digest)
.with_extension("*")
.to_string_lossy()
.into_owned()
}
/// Fetch the file for a given entry ID
/// `GET /show/{id}`
pub fn show(conn: &mut plug::Conn) | }
}
pub fn show_thumb(conn: &mut plug::Conn) {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("t", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()),
}
}
/// `GET /entries/{id}/tags`
///
/// Gets a view fragment to show and modify the tags.
pub fn show_entry_tags(conn: &mut plug::Conn) {
let entry_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
let tags = queries::find_tags_for(conn, entry_id)
.expect("could not load tags");
let data = TagView { tags: tags };
let view = views::render(conn.req(), "tag/_panel", &data);
conn.send_resp(200, &view);
}
/// `POST /entries/upload`
///
/// Returns a `models::Entry` as JSON or an HTTP 500 error on failure.
/// Expects a multipart form containing a file payload in the field `upload`.
/// This payload is extracted and converted to a SHA-256 digest.
///
/// If the entry already exists it is returned immediately, otherwise it is
/// moved to the content addressable storage pool and the entry is created.
///
pub fn submit(conn: &mut plug::Conn) {
// TODO: handle webm, etc.
use models::queries;
// TODO: simpler way to get extensions
let mut form_fields = { conn.req_mut().mut_extensions().pop::<MultipartForm>() };
// NOTE: these are separate b/c we need to hang on to the file ref...
let file_upload = form_fields.as_mut()
.and_then(|form| extract_file(form, "upload"));
let digest = file_upload.as_ref()
.and_then(|file| util::processing::hash_file(file.path.as_path()).ok());
// TODO: these are gross b/c we can't return anything, thus there's no good
// way to use Result+`try!`...
let file_upload = match file_upload {
None => { conn.send_resp(500, "file upload missing?"); return },
Some(file_upload) => file_upload,
};
let digest = match digest {
None => { conn.send_resp(500, "file upload did not digest?"); return },
Some(digest) => digest,
};
info!("got file digest: {}", digest);
match queries::find_entry_by_hash(conn, &digest) {
Ok(Some(entry)) => send_json(conn, entry),
Ok(None) => write_entry(conn, digest, file_upload),
Err(msg) => conn.send_resp(500, &format!("could not load entry[{}]: {}", digest, msg)),
};
}
// TODO: pull this out to aqua web?
fn send_json<T: ::serde::ser::Serialize>(conn: &mut plug::Conn, json_payload: T) {
let output = serde_json::to_string(&json_payload)
.expect("could not serialize output!");
conn.send_resp(200, &output);
}
// TODO:???
fn write_entry(conn: &mut plug::Conn, digest: String, file: SavedFile) {
use models::{queries, NewEntry};
// open the file
let mut file = match File::open(file.path) {
Ok(file) => file,
Err(_msg) => { conn.send_resp(500, "could not open your upload..."); return },
};
// read into temp buffer
let mut buf = vec![];
let file_ty = match file.read_to_end(&mut buf) {
Ok(_size) => util::processing::detect_image(&buf[..]),
Err(_msg) => { conn.send_resp(500, "could not read your upload..."); return },
};
// create content aware address for it
let (content_path, thumb_path, content_name, file_ty) = match file_ty {
Some(file_ty) => (
format!("{}/f{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}/t{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}.{}", &digest[..], file_ty.extension()),
file_ty
),
None => { conn.send_resp(500, "unsupported mime type"); return },
};
// create buckets in content store
let dst_file_path = PathBuf::from(content_path);
if let Err(msg) = fs::create_dir_all(&dst_file_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
let dst_thumb_path = PathBuf::from(thumb_path);
if let Err(msg) = fs::create_dir_all(&dst_thumb_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// copy thumbnail to bucket
let dst_file_name = dst_thumb_path.join(content_name.clone());
if let Err(msg) = store_thumbnail(&buf, &dst_file_name, file_ty.format()) {
warn!("error storing thumb: {:?}", msg);
conn.send_resp(500, "could not add thumb to content store");
return
}
// copy file to bucket
let dst_file_name = dst_file_path.join(content_name.clone());
let dst_file_copy = File::create(dst_file_name).and_then(|mut file| {
io::copy(&mut Cursor::new(buf), &mut file)
});
if let Err(msg) = dst_file_copy {
warn!("error storing file: {:?}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
if let Err(msg) = dst_file_copy {
warn!("error storing file: {:?}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// store that sucker in the db...
match queries::find_or_insert(conn, NewEntry { hash: &digest, mime: Some(&file_ty.mime()) }) {
Some(entry) => send_json(conn, entry),
None => conn.send_resp(500, "could not store entry in DB"),
}
}
fn store_thumbnail<P>(in_buf: &[u8], out_path: P, out_fmt: ImageFormat) -> ImageResult<()>
where P: AsRef<Path> {
let image = image::load_from_memory(in_buf)?;
let thumb = image.resize(200, 200, FilterType::Nearest);
let mut dest = File::create(out_path)?;
thumb.save(&mut dest, out_fmt)?;
dest.flush()?; Ok(())
}
| {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("f", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()), | identifier_body |
entries.rs | use std::env;
use std::error::Error;
use std::fs::{self, File};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf}; | use views;
use util;
use aqua_web::plug;
use aqua_web::mw::forms::{MultipartForm, SavedFile};
use aqua_web::mw::router::Router;
use glob::glob;
use image::{self, FilterType, ImageFormat, ImageResult};
use serde_json;
#[derive(Serialize)]
struct TagView {
tags: Vec<Tag>,
}
fn glob_for_category(category: &str, digest: &str) -> String {
// TODO: assert digest is really a digest
// TODO: assert category is really a category
PathBuf::from(env::var("CONTENT_STORE").unwrap())
.join(format!("{}{}", category, &digest[..2]))
.join(&digest)
.with_extension("*")
.to_string_lossy()
.into_owned()
}
/// Fetch the file for a given entry ID
/// `GET /show/{id}`
pub fn show(conn: &mut plug::Conn) {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("f", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()),
}
}
pub fn show_thumb(conn: &mut plug::Conn) {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("t", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()),
}
}
/// `GET /entries/{id}/tags`
///
/// Gets a view fragment to show and modify the tags.
pub fn show_entry_tags(conn: &mut plug::Conn) {
let entry_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
let tags = queries::find_tags_for(conn, entry_id)
.expect("could not load tags");
let data = TagView { tags: tags };
let view = views::render(conn.req(), "tag/_panel", &data);
conn.send_resp(200, &view);
}
/// `POST /entries/upload`
///
/// Returns a `models::Entry` as JSON or an HTTP 500 error on failure.
/// Expects a multipart form containing a file payload in the field `upload`.
/// This payload is extracted and converted to a SHA-256 digest.
///
/// If the entry already exists it is returned immediately, otherwise it is
/// moved to the content addressable storage pool and the entry is created.
///
pub fn submit(conn: &mut plug::Conn) {
// TODO: handle webm, etc.
use models::queries;
// TODO: simpler way to get extensions
let mut form_fields = { conn.req_mut().mut_extensions().pop::<MultipartForm>() };
// NOTE: these are separate b/c we need to hang on to the file ref...
let file_upload = form_fields.as_mut()
.and_then(|form| extract_file(form, "upload"));
let digest = file_upload.as_ref()
.and_then(|file| util::processing::hash_file(file.path.as_path()).ok());
// TODO: these are gross b/c we can't return anything, thus there's no good
// way to use Result+`try!`...
let file_upload = match file_upload {
None => { conn.send_resp(500, "file upload missing?"); return },
Some(file_upload) => file_upload,
};
let digest = match digest {
None => { conn.send_resp(500, "file upload did not digest?"); return },
Some(digest) => digest,
};
info!("got file digest: {}", digest);
match queries::find_entry_by_hash(conn, &digest) {
Ok(Some(entry)) => send_json(conn, entry),
Ok(None) => write_entry(conn, digest, file_upload),
Err(msg) => conn.send_resp(500, &format!("could not load entry[{}]: {}", digest, msg)),
};
}
// TODO: pull this out to aqua web?
fn send_json<T: ::serde::ser::Serialize>(conn: &mut plug::Conn, json_payload: T) {
let output = serde_json::to_string(&json_payload)
.expect("could not serialize output!");
conn.send_resp(200, &output);
}
// TODO:???
fn write_entry(conn: &mut plug::Conn, digest: String, file: SavedFile) {
use models::{queries, NewEntry};
// open the file
let mut file = match File::open(file.path) {
Ok(file) => file,
Err(_msg) => { conn.send_resp(500, "could not open your upload..."); return },
};
// read into temp buffer
let mut buf = vec![];
let file_ty = match file.read_to_end(&mut buf) {
Ok(_size) => util::processing::detect_image(&buf[..]),
Err(_msg) => { conn.send_resp(500, "could not read your upload..."); return },
};
// create content aware address for it
let (content_path, thumb_path, content_name, file_ty) = match file_ty {
Some(file_ty) => (
format!("{}/f{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}/t{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}.{}", &digest[..], file_ty.extension()),
file_ty
),
None => { conn.send_resp(500, "unsupported mime type"); return },
};
// create buckets in content store
let dst_file_path = PathBuf::from(content_path);
if let Err(msg) = fs::create_dir_all(&dst_file_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
let dst_thumb_path = PathBuf::from(thumb_path);
if let Err(msg) = fs::create_dir_all(&dst_thumb_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// copy thumbnail to bucket
let dst_file_name = dst_thumb_path.join(content_name.clone());
if let Err(msg) = store_thumbnail(&buf, &dst_file_name, file_ty.format()) {
warn!("error storing thumb: {:?}", msg);
conn.send_resp(500, "could not add thumb to content store");
return
}
// copy file to bucket
let dst_file_name = dst_file_path.join(content_name.clone());
let dst_file_copy = File::create(dst_file_name).and_then(|mut file| {
io::copy(&mut Cursor::new(buf), &mut file)
});
if let Err(msg) = dst_file_copy {
warn!("error storing file: {:?}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
if let Err(msg) = dst_file_copy {
warn!("error storing file: {:?}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// store that sucker in the db...
match queries::find_or_insert(conn, NewEntry { hash: &digest, mime: Some(&file_ty.mime()) }) {
Some(entry) => send_json(conn, entry),
None => conn.send_resp(500, "could not store entry in DB"),
}
}
fn store_thumbnail<P>(in_buf: &[u8], out_path: P, out_fmt: ImageFormat) -> ImageResult<()>
where P: AsRef<Path> {
let image = image::load_from_memory(in_buf)?;
let thumb = image.resize(200, 200, FilterType::Nearest);
let mut dest = File::create(out_path)?;
thumb.save(&mut dest, out_fmt)?;
dest.flush()?; Ok(())
} |
use controllers::prelude::*;
use models::{queries, Tag}; | random_line_split |
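For the upload path in the `entries.rs` rows above: `write_entry` buckets content by the first two hex characters of the SHA-256 digest, using an `f` prefix for originals and `t` for thumbnails resized to fit 200×200, and `glob_for_category` later globs `<digest>.*` inside the matching bucket. Under a hypothetical `CONTENT_STORE` directory and a digest starting with `3a`, the layout would look like:

```text
$CONTENT_STORE/
├── f3a/3a7bd9…41c.png   # original upload, extension from the detected image type
└── t3a/3a7bd9…41c.png   # thumbnail for the same digest
```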
entries.rs | use std::env;
use std::error::Error;
use std::fs::{self, File};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
use controllers::prelude::*;
use models::{queries, Tag};
use views;
use util;
use aqua_web::plug;
use aqua_web::mw::forms::{MultipartForm, SavedFile};
use aqua_web::mw::router::Router;
use glob::glob;
use image::{self, FilterType, ImageFormat, ImageResult};
use serde_json;
#[derive(Serialize)]
struct TagView {
tags: Vec<Tag>,
}
fn glob_for_category(category: &str, digest: &str) -> String {
// TODO: assert digest is really a digest
// TODO: assert category is really a category
PathBuf::from(env::var("CONTENT_STORE").unwrap())
.join(format!("{}{}", category, &digest[..2]))
.join(&digest)
.with_extension("*")
.to_string_lossy()
.into_owned()
}
/// Fetch the file for a given entry ID
/// `GET /show/{id}`
pub fn show(conn: &mut plug::Conn) {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("f", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()),
}
}
pub fn show_thumb(conn: &mut plug::Conn) {
let file_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
match queries::find_entry(conn, file_id) {
Ok(entry) => {
let glob_pattern = glob_for_category("t", &entry.hash);
info!("glob pattern: {}", glob_pattern);
let paths = glob(&glob_pattern)
.expect("could not parse glob pattern")
.map(|res| res.ok().unwrap())
.collect::<Vec<PathBuf>>();
assert_eq!(paths.len(), 1);
conn.send_file(200, &paths[0]);
// conn.send_resp(200, &path_glob);
},
Err(err) => conn.send_resp(404, err.description()),
}
}
/// `GET /entries/{id}/tags`
///
/// Gets a view fragment to show and modify the tags.
pub fn show_entry_tags(conn: &mut plug::Conn) {
let entry_id = Router::param::<i64>(conn, "id")
.expect("missing route param: id");
let tags = queries::find_tags_for(conn, entry_id)
.expect("could not load tags");
let data = TagView { tags: tags };
let view = views::render(conn.req(), "tag/_panel", &data);
conn.send_resp(200, &view);
}
/// `POST /entries/upload`
///
/// Returns a `models::Entry` as JSON or an HTTP 500 error on failure.
/// Expects a multipart form containing a file payload in the field `upload`.
/// This payload is extracted and hashed to produce a SHA-256 digest.
///
/// If an entry with that digest already exists it is returned immediately; otherwise the
/// file is moved into the content-addressable storage pool and a new entry is created.
///
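/// Example request (host and port are hypothetical):
/// `curl -F upload=@picture.png http://localhost:8080/entries/upload`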
pub fn | (conn: &mut plug::Conn) {
// TODO: handle webm, etc.
use models::queries;
// TODO: simpler way to get extensions
let mut form_fields = { conn.req_mut().mut_extensions().pop::<MultipartForm>() };
// NOTE: these are separate b/c we need to hang on to the file ref...
let file_upload = form_fields.as_mut()
.and_then(|form| extract_file(form, "upload"));
let digest = file_upload.as_ref()
.and_then(|file| util::processing::hash_file(file.path.as_path()).ok());
// TODO: these are gross b/c we can't return anything, thus there's no good
// way to use Result+`try!`...
let file_upload = match file_upload {
None => { conn.send_resp(500, "file upload missing?"); return },
Some(file_upload) => file_upload,
};
let digest = match digest {
None => { conn.send_resp(500, "file upload did not digest?"); return },
Some(digest) => digest,
};
info!("got file digest: {}", digest);
match queries::find_entry_by_hash(conn, &digest) {
Ok(Some(entry)) => send_json(conn, entry),
Ok(None) => write_entry(conn, digest, file_upload),
Err(msg) => conn.send_resp(500, &format!("could not load entry[{}]: {}", digest, msg)),
};
}
// TODO: pull this out to aqua web?
fn send_json<T: ::serde::ser::Serialize>(conn: &mut plug::Conn, json_payload: T) {
let output = serde_json::to_string(&json_payload)
.expect("could not serialize output!");
conn.send_resp(200, &output);
}
// TODO: clean this up; copies the upload and a generated thumbnail into the content store, then records the entry in the DB.
fn write_entry(conn: &mut plug::Conn, digest: String, file: SavedFile) {
use models::{queries, NewEntry};
// open the file
let mut file = match File::open(file.path) {
Ok(file) => file,
Err(_msg) => { conn.send_resp(500, "could not open your upload..."); return },
};
// read into temp buffer
let mut buf = vec![];
let file_ty = match file.read_to_end(&mut buf) {
Ok(_size) => util::processing::detect_image(&buf[..]),
Err(_msg) => { conn.send_resp(500, "could not read your upload..."); return },
};
// create content aware address for it
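// layout: files go under "f<xx>/", thumbnails under "t<xx>/" (xx = first two chars of the digest),
// and both are named "<digest>.<extension>"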
let (content_path, thumb_path, content_name, file_ty) = match file_ty {
Some(file_ty) => (
format!("{}/f{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}/t{}", env::var("CONTENT_STORE").unwrap(), &digest[..2]),
format!("{}.{}", &digest[..], file_ty.extension()),
file_ty
),
None => { conn.send_resp(500, "unsupported mime type"); return },
};
// create buckets in content store
let dst_file_path = PathBuf::from(content_path);
if let Err(msg) = fs::create_dir_all(&dst_file_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
let dst_thumb_path = PathBuf::from(thumb_path);
if let Err(msg) = fs::create_dir_all(&dst_thumb_path) {
warn!("could not create content store bucket: {}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// copy thumbnail to bucket
let dst_file_name = dst_thumb_path.join(content_name.clone());
if let Err(msg) = store_thumbnail(&buf, &dst_file_name, file_ty.format()) {
warn!("error storing thumb: {:?}", msg);
conn.send_resp(500, "could not add thumb to content store");
return
}
// copy file to bucket
let dst_file_name = dst_file_path.join(content_name.clone());
let dst_file_copy = File::create(dst_file_name).and_then(|mut file| {
io::copy(&mut Cursor::new(buf), &mut file)
});
if let Err(msg) = dst_file_copy {
warn!("error storing file: {:?}", msg);
conn.send_resp(500, "could not add file to content store");
return
}
// store that sucker in the db...
match queries::find_or_insert(conn, NewEntry { hash: &digest, mime: Some(&file_ty.mime()) }) {
Some(entry) => send_json(conn, entry),
None => conn.send_resp(500, "could not store entry in DB"),
}
}
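// Decodes the uploaded bytes from memory, resizes them to fit within 200x200
// (nearest-neighbour), and writes the thumbnail to `out_path` in the given format.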
fn store_thumbnail<P>(in_buf: &[u8], out_path: P, out_fmt: ImageFormat) -> ImageResult<()>
where P: AsRef<Path> {
let image = image::load_from_memory(in_buf)?;
let thumb = image.resize(200, 200, FilterType::Nearest);
let mut dest = File::create(out_path)?;
thumb.save(&mut dest, out_fmt)?;
dest.flush()?;
Ok(())
}
| submit | identifier_name |
variable.rs | use std::fmt;
use path::Path;
pub struct | {
name: String,
is_new_declaration: bool,
is_global: bool
}
impl VariableAssignment {
pub fn new(name: String, is_new_declaration: bool, is_global: bool) -> VariableAssignment {
VariableAssignment {
name: name,
is_new_declaration: is_new_declaration,
is_global: is_global
}
}
pub fn name(&self) -> &str {
&self.name
}
pub fn is_new_declaration(&self) -> bool {
self.is_new_declaration
}
pub fn is_global(&self) -> bool {
self.is_global
}
pub fn set_is_global(&mut self, is_global: bool) {
self.is_global = is_global
}
}
impl fmt::Display for VariableAssignment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "VarAssign to {}", self.name)
}
}
pub struct VariableReference {
name: String
}
impl VariableReference {
pub fn new(name: String) -> VariableReference {
VariableReference {
name: name
}
}
pub fn name(&self) -> &str {
&self.name
}
}
impl fmt::Display for VariableReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "var({})", self.name)
}
}
pub struct ReadCount {
target: Path
}
impl ReadCount {
pub fn new(target: Path) -> ReadCount {
ReadCount {
target: target
}
}
pub fn target(&self) -> &Path {
&self.target
}
}
impl fmt::Display for ReadCount {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "read_count({})", self.target.to_string())
}
} | VariableAssignment | identifier_name |
variable.rs | use std::fmt;
use path::Path;
pub struct VariableAssignment {
name: String,
is_new_declaration: bool,
is_global: bool
}
impl VariableAssignment {
pub fn new(name: String, is_new_declaration: bool, is_global: bool) -> VariableAssignment {
VariableAssignment {
name: name,
is_new_declaration: is_new_declaration,
is_global: is_global
}
}
pub fn name(&self) -> &str {
&self.name
}
pub fn is_new_declaration(&self) -> bool {
self.is_new_declaration
}
pub fn is_global(&self) -> bool {
self.is_global
}
pub fn set_is_global(&mut self, is_global: bool) {
self.is_global = is_global
}
}
impl fmt::Display for VariableAssignment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "VarAssign to {}", self.name)
}
}
pub struct VariableReference {
name: String
}
impl VariableReference {
pub fn new(name: String) -> VariableReference {
VariableReference {
name: name
}
}
pub fn name(&self) -> &str {
&self.name
}
}
impl fmt::Display for VariableReference { | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "var({})", self.name)
}
}
pub struct ReadCount {
target: Path
}
impl ReadCount {
pub fn new(target: Path) -> ReadCount {
ReadCount {
target: target
}
}
pub fn target(&self) -> &Path {
&self.target
}
}
impl fmt::Display for ReadCount {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "read_count({})", self.target.to_string())
}
} | random_line_split |
|
variable.rs | use std::fmt;
use path::Path;
pub struct VariableAssignment {
name: String,
is_new_declaration: bool,
is_global: bool
}
impl VariableAssignment {
pub fn new(name: String, is_new_declaration: bool, is_global: bool) -> VariableAssignment {
VariableAssignment {
name: name,
is_new_declaration: is_new_declaration,
is_global: is_global
}
}
pub fn name(&self) -> &str |
pub fn is_new_declaration(&self) -> bool {
self.is_new_declaration
}
pub fn is_global(&self) -> bool {
self.is_global
}
pub fn set_is_global(&mut self, is_global: bool) {
self.is_global = is_global
}
}
impl fmt::Display for VariableAssignment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "VarAssign to {}", self.name)
}
}
pub struct VariableReference {
name: String
}
impl VariableReference {
pub fn new(name: String) -> VariableReference {
VariableReference {
name: name
}
}
pub fn name(&self) -> &str {
&self.name
}
}
impl fmt::Display for VariableReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "var({})", self.name)
}
}
pub struct ReadCount {
target: Path
}
impl ReadCount {
pub fn new(target: Path) -> ReadCount {
ReadCount {
target: target
}
}
pub fn target(&self) -> &Path {
&self.target
}
}
impl fmt::Display for ReadCount {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "read_count({})", self.target.to_string())
}
} | {
&self.name
} | identifier_body |
fakekms_test.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use tink_core::registry::KmsClient;
use tink_tests::{fakekms, fakekms::FakeClient};
const KEY_URI: &str = "fake-kms://CM2b3_MDElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIK75t5L-adlUwVhWvRuWUwYARABGM2b3_MDIAE";
const ANOTHER_KEY_URI: &str = "fake-kms://CLHW_5cHElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIZ-2h9InfZTbbkJjaJBsVgYARABGLHW_5cHIAE";
#[test]
fn test_valid_key_uris() {
tink_aead::init();
let new_key_uri = fakekms::new_key_uri().unwrap();
let test_cases = vec![KEY_URI, ANOTHER_KEY_URI, &new_key_uri];
for key_uri in test_cases {
let client = fakekms::FakeClient::new(key_uri).unwrap(); | assert!(client.supported(key_uri));
let primitive = client.get_aead(key_uri).unwrap();
let plaintext = b"some data to encrypt";
let aad = b"extra data to authenticate";
let ciphertext = primitive.encrypt(&plaintext[..], &aad[..]).unwrap();
let decrypted = primitive.decrypt(&ciphertext, &aad[..]).unwrap();
assert_eq!(&plaintext[..], decrypted);
}
}
#[test]
fn test_bad_uri_prefix() {
tink_aead::init();
assert!(fakekms::FakeClient::new("bad-prefix://encodedkeyset").is_err());
}
#[test]
fn test_valid_prefix() {
tink_aead::init();
let uri_prefix = "fake-kms://CM2b"; // is a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(client.supported(KEY_URI));
let result = client.get_aead(KEY_URI);
assert!(result.is_ok(), "{:?}", result.err());
}
#[test]
fn test_invalid_prefix() {
tink_aead::init();
let uri_prefix = "fake-kms://CM2x"; // is not a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(!client.supported(KEY_URI));
assert!(client.get_aead(KEY_URI).is_err());
}
#[test]
fn test_get_aead_fails_with_bad_keyset_encoding() {
tink_aead::init();
let client = FakeClient::new("fake-kms://bad").unwrap();
assert!(client.get_aead("fake-kms://badencoding").is_err());
} | random_line_split |
|
fakekms_test.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use tink_core::registry::KmsClient;
use tink_tests::{fakekms, fakekms::FakeClient};
const KEY_URI: &str = "fake-kms://CM2b3_MDElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIK75t5L-adlUwVhWvRuWUwYARABGM2b3_MDIAE";
const ANOTHER_KEY_URI: &str = "fake-kms://CLHW_5cHElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIZ-2h9InfZTbbkJjaJBsVgYARABGLHW_5cHIAE";
#[test]
fn test_valid_key_uris() {
tink_aead::init();
let new_key_uri = fakekms::new_key_uri().unwrap();
let test_cases = vec![KEY_URI, ANOTHER_KEY_URI, &new_key_uri];
for key_uri in test_cases {
let client = fakekms::FakeClient::new(key_uri).unwrap();
assert!(client.supported(key_uri));
let primitive = client.get_aead(key_uri).unwrap();
let plaintext = b"some data to encrypt";
let aad = b"extra data to authenticate";
let ciphertext = primitive.encrypt(&plaintext[..], &aad[..]).unwrap();
let decrypted = primitive.decrypt(&ciphertext, &aad[..]).unwrap();
assert_eq!(&plaintext[..], decrypted);
}
}
#[test]
fn test_bad_uri_prefix() {
tink_aead::init();
assert!(fakekms::FakeClient::new("bad-prefix://encodedkeyset").is_err());
}
#[test]
fn test_valid_prefix() |
#[test]
fn test_invalid_prefix() {
tink_aead::init();
let uri_prefix = "fake-kms://CM2x"; // is not a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(!client.supported(KEY_URI));
assert!(client.get_aead(KEY_URI).is_err());
}
#[test]
fn test_get_aead_fails_with_bad_keyset_encoding() {
tink_aead::init();
let client = FakeClient::new("fake-kms://bad").unwrap();
assert!(client.get_aead("fake-kms://badencoding").is_err());
}
| {
tink_aead::init();
let uri_prefix = "fake-kms://CM2b"; // is a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(client.supported(KEY_URI));
let result = client.get_aead(KEY_URI);
assert!(result.is_ok(), "{:?}", result.err());
} | identifier_body |
fakekms_test.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
use tink_core::registry::KmsClient;
use tink_tests::{fakekms, fakekms::FakeClient};
const KEY_URI: &str = "fake-kms://CM2b3_MDElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIK75t5L-adlUwVhWvRuWUwYARABGM2b3_MDIAE";
const ANOTHER_KEY_URI: &str = "fake-kms://CLHW_5cHElQKSAowdHlwZS5nb29nbGVhcGlzLmNvbS9nb29nbGUuY3J5cHRvLnRpbmsuQWVzR2NtS2V5EhIaEIZ-2h9InfZTbbkJjaJBsVgYARABGLHW_5cHIAE";
#[test]
fn test_valid_key_uris() {
tink_aead::init();
let new_key_uri = fakekms::new_key_uri().unwrap();
let test_cases = vec![KEY_URI, ANOTHER_KEY_URI, &new_key_uri];
for key_uri in test_cases {
let client = fakekms::FakeClient::new(key_uri).unwrap();
assert!(client.supported(key_uri));
let primitive = client.get_aead(key_uri).unwrap();
let plaintext = b"some data to encrypt";
let aad = b"extra data to authenticate";
let ciphertext = primitive.encrypt(&plaintext[..], &aad[..]).unwrap();
let decrypted = primitive.decrypt(&ciphertext, &aad[..]).unwrap();
assert_eq!(&plaintext[..], decrypted);
}
}
#[test]
fn test_bad_uri_prefix() {
tink_aead::init();
assert!(fakekms::FakeClient::new("bad-prefix://encodedkeyset").is_err());
}
#[test]
fn | () {
tink_aead::init();
let uri_prefix = "fake-kms://CM2b"; // is a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(client.supported(KEY_URI));
let result = client.get_aead(KEY_URI);
assert!(result.is_ok(), "{:?}", result.err());
}
#[test]
fn test_invalid_prefix() {
tink_aead::init();
let uri_prefix = "fake-kms://CM2x"; // is not a prefix of KEY_URI
let client = FakeClient::new(uri_prefix).unwrap();
assert!(!client.supported(KEY_URI));
assert!(client.get_aead(KEY_URI).is_err());
}
#[test]
fn test_get_aead_fails_with_bad_keyset_encoding() {
tink_aead::init();
let client = FakeClient::new("fake-kms://bad").unwrap();
assert!(client.get_aead("fake-kms://badencoding").is_err());
}
| test_valid_prefix | identifier_name |
closure.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use back::abi;
use back::link::mangle_internal_name_by_path_and_seq;
use driver::config::FullDebugInfo;
use lib::llvm::ValueRef;
use middle::def;
use middle::freevars;
use middle::lang_items::ClosureExchangeMallocFnLangItem;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::datum::{Datum, DatumBlock, Expr, Lvalue, rvalue_scratch_datum};
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::machine::llsize_of;
use middle::trans::type_of::*;
use middle::trans::type_::Type;
use middle::ty;
use util::ppaux::Repr;
use util::ppaux::ty_to_str;
use arena::TypedArena;
use syntax::ast;
// ___Good to know (tm)__________________________________________________
//
// The layout of a closure environment in memory is
// roughly as follows:
//
// struct rust_opaque_box { // see rust_internal.h
// unsigned ref_count; // obsolete (part of @T's header)
// fn(void*) *drop_glue; // destructor (for proc)
// rust_opaque_box *prev; // obsolete (part of @T's header)
// rust_opaque_box *next; // obsolete (part of @T's header)
// struct closure_data {
// upvar1_t upvar1;
// ...
// upvarN_t upvarN;
// }
// };
//
// Note that the closure is itself a rust_opaque_box. This is true
// even for ~fn and ||, because we wish to keep binary compatibility
// between all kinds of closures. The allocation strategy for this
// closure depends on the closure type. For a sendfn, the closure
// (and the referenced type descriptors) will be allocated in the
// exchange heap. For a fn, the closure is allocated in the task heap
// and is reference counted. For a block, the closure is allocated on
// the stack.
//
// ## Opaque closures and the embedded type descriptor ##
//
// One interesting part of closures is that they encapsulate the data
// that they close over. So when I have a ptr to a closure, I do not
// know how many type descriptors it contains nor what upvars are
// captured within. That means I do not know precisely how big it is
// nor where its fields are located. This is called an "opaque
// closure".
//
// Typically an opaque closure suffices because we only manipulate it
// by ptr. The routine Type::at_box().ptr_to() returns an appropriate
// type for such an opaque closure; it allows access to the box fields,
// but not the closure_data itself.
//
// But sometimes, such as when cloning or freeing a closure, we need
// to know the full information. That is where the type descriptor
// that defines the closure comes in handy. We can use its take and
// drop glue functions to allocate/free data as needed.
//
// ## Subtleties concerning alignment ##
//
// It is important that we be able to locate the closure data *without
// knowing the kind of data that is being bound*. This can be tricky
// because the alignment requirements of the bound data affect the
// alignment requirements of the closure_data struct as a whole. However,
// right now this is a non-issue in any case, because the size of the
// rust_opaque_box header is always a multiple of 16-bytes, which is
// the maximum alignment requirement we ever have to worry about.
//
// The only reason alignment matters is that, in order to learn what data
// is bound, we would normally first load the type descriptors: but their
// location ultimately depends on their content! There is, however, a
// workaround. We can load the tydesc from the rust_opaque_box, which
// describes the closure_data struct and has self-contained derived type
// descriptors, and read the alignment from there. It's just annoying to
// do. Hopefully should this ever become an issue we'll have monomorphized
// and type descriptors will all be a bad dream.
//
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
pub struct EnvValue {
action: freevars::CaptureMode,
datum: Datum<Lvalue>
}
impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> String {
format!("{}({})", self.action, self.datum.to_str(ccx))
}
}
// Given a closure ty, emits a corresponding tuple ty
pub fn mk_closure_tys(tcx: &ty::ctxt,
bound_values: &[EnvValue])
-> ty::t {
// determine the types of the values in the env. Note that this
// is the actual types that will be stored in the map, not the
// logical types as the user sees them, so by-ref upvars must be
// converted to ptrs.
let bound_tys = bound_values.iter().map(|bv| {
match bv.action {
freevars::CaptureByValue => bv.datum.ty,
freevars::CaptureByRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
}
}).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty;
}
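// Builds a tuple type mirroring the rust_opaque_box layout described above:
// (ref_count, drop_glue, prev, next, closure_data).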
fn tuplify_box_ty(tcx: &ty::ctxt, t: ty::t) -> ty::t {
let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8());
ty::mk_tup(tcx, vec!(ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t))
}
fn allocate_cbox<'a>(bcx: &'a Block<'a>,
store: ty::TraitStore,
cdata_ty: ty::t)
-> Result<'a> {
let _icx = push_ctxt("closure::allocate_cbox");
let tcx = bcx.tcx();
// Allocate and initialize the box:
match store {
ty::UniqTraitStore => {
let ty = type_of(bcx.ccx(), cdata_ty);
let size = llsize_of(bcx.ccx(), ty);
// we treat proc as @ here, which isn't ideal
malloc_raw_dyn_managed(bcx, cdata_ty, ClosureExchangeMallocFnLangItem, size)
}
ty::RegionTraitStore(..) => {
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let llbox = alloc_ty(bcx, cbox_ty, "__closure");
Result::new(bcx, llbox)
}
}
}
pub struct ClosureResult<'a> {
llbox: ValueRef, // llvalue of ptr to closure
cdata_ty: ty::t, // type of the closure data
bcx: &'a Block<'a> // final bcx
}
// Given a block context and a list of tydescs and values to bind
// construct a closure out of them. If copying is true, it is a
// heap allocated closure that copies the upvars into environment.
// Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'a>(
bcx: &'a Block<'a>,
bound_values: Vec<EnvValue>,
store: ty::TraitStore)
-> ClosureResult<'a> {
let _icx = push_ctxt("closure::store_environment");
let ccx = bcx.ccx();
let tcx = ccx.tcx();
// compute the type of the closure
let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
// whatever.
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let cboxptr_ty = ty::mk_ptr(tcx, ty::mt {ty:cbox_ty, mutbl:ast::MutImmutable});
let llboxptr_ty = type_of(ccx, cboxptr_ty);
// If there are no bound values, no point in allocating anything.
if bound_values.is_empty() {
return ClosureResult {llbox: C_null(llboxptr_ty),
cdata_ty: cdata_ty,
bcx: bcx};
}
// allocate closure in the heap
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_str(tcx, cbox_ty));
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for (i, bv) in bound_values.move_iter().enumerate() {
debug!("Copy {} into closure", bv.to_str(ccx));
if ccx.sess().asm_comments() {
add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx)).as_slice());
}
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
match bv.action {
freevars::CaptureByValue => {
bcx = bv.datum.store_to(bcx, bound_data);
}
freevars::CaptureByRef => {
Store(bcx, bv.datum.to_llref(), bound_data);
}
}
}
ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
}
// Given a context and a list of upvars, build a closure. This just
// collects the upvars and packages them up for store_environment.
fn build_closure<'a>(bcx0: &'a Block<'a>,
freevar_mode: freevars::CaptureMode,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> ClosureResult<'a>
{
let _icx = push_ctxt("closure::build_closure");
// If we need to, package up the iterator body to call
let bcx = bcx0;
// Package up the captured upvars
let mut env_vals = Vec::new();
for freevar in freevars.iter() {
let datum = expr::trans_local_var(bcx, freevar.def);
env_vals.push(EnvValue {action: freevar_mode, datum: datum});
}
store_environment(bcx, env_vals, store)
}
// Given an enclosing block context, a new function context, a closure type,
// and a list of upvars, generate code to load and populate the environment
// with the upvars and type descriptors.
fn load_environment<'a>(bcx: &'a Block<'a>,
cdata_ty: ty::t,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> &'a Block<'a> {
let _icx = push_ctxt("closure::load_environment");
// Don't bother to create the block if there's nothing to load
if freevars.len() == 0 {
return bcx;
}
// Load a pointer to the closure data, skipping over the box header:
let llcdata = at_box_body(bcx, cdata_ty, bcx.fcx.llenv.unwrap());
// Store the pointer to closure data in an alloca for debug info because that's what the
// llvm.dbg.declare intrinsic expects
let env_pointer_alloca = if bcx.sess().opts.debuginfo == FullDebugInfo {
let alloc = alloc_ty(bcx, ty::mk_mut_ptr(bcx.tcx(), cdata_ty), "__debuginfo_env_ptr");
Store(bcx, llcdata, alloc);
Some(alloc)
} else {
None
};
// Populate the upvars from the environment
let mut i = 0u;
for freevar in freevars.iter() {
let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
match store {
ty::RegionTraitStore(..) => { upvarptr = Load(bcx, upvarptr); }
ty::UniqTraitStore => {}
}
let def_id = freevar.def.def_id();
bcx.fcx.llupvars.borrow_mut().insert(def_id.node, upvarptr);
for &env_pointer_alloca in env_pointer_alloca.iter() {
debuginfo::create_captured_var_metadata(
bcx,
def_id.node,
cdata_ty,
env_pointer_alloca,
i,
store,
freevar.span);
}
i += 1u;
}
bcx
}
fn fill_fn_pair(bcx: &Block, pair: ValueRef, llfn: ValueRef, llenvptr: ValueRef) {
Store(bcx, llfn, GEPi(bcx, pair, [0u, abi::fn_field_code]));
let llenvptr = PointerCast(bcx, llenvptr, Type::i8p(bcx.ccx()));
Store(bcx, llenvptr, GEPi(bcx, pair, [0u, abi::fn_field_box]));
}
pub fn trans_expr_fn<'a>(
bcx: &'a Block<'a>,
store: ty::TraitStore,
decl: &ast::FnDecl,
body: &ast::Block,
id: ast::NodeId,
dest: expr::Dest)
-> &'a Block<'a> {
/*!
*
* Translates the body of a closure expression.
*
* - `store`
* - `decl`
* - `body`
* - `id`: The id of the closure expression.
* - `cap_clause`: information about captured variables, if any.
* - `dest`: where to write the closure value, which must be a
(fn ptr, env) pair
*/
let _icx = push_ctxt("closure::trans_expr_fn");
let dest_addr = match dest {
expr::SaveIn(p) => p,
expr::Ignore => {
return bcx; // closure construction is non-side-effecting
}
};
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let fty = node_id_type(bcx, id);
let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure")
});
let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice());
// set an inline hint for all closures
set_inline_hint(llfn);
let freevar_mode = freevars::get_capture_mode(tcx, id);
let freevars: Vec<freevars::freevar_entry> =
freevars::with_freevars(
tcx, id,
|fv| fv.iter().map(|&fv| fv).collect());
let ClosureResult {llbox, cdata_ty, bcx} =
build_closure(bcx, freevar_mode, &freevars, store);
trans_closure(ccx, decl, body, llfn,
bcx.fcx.param_substs, id,
[], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, &freevars, store));
fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx
}
pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef,
is_local: bool) -> ValueRef {
let def_id = match def {
def::DefFn(did, _) | def::DefStaticMethod(did, _, _) |
def::DefVariant(_, did, _) | def::DefStruct(did) => did,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a statically resolved fn, got \
{:?}",
def).as_slice());
}
};
|
let tcx = ccx.tcx();
debug!("get_wrapper_for_bare_fn(closure_ty={})", closure_ty.repr(tcx));
let f = match ty::get(closure_ty).sty {
ty::ty_closure(ref f) => f,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a closure ty, got {}",
closure_ty.repr(tcx)).as_slice());
}
};
let name = ty::with_path(tcx, def_id, |path| {
mangle_internal_name_by_path_and_seq(path, "as_closure")
});
let llfn = if is_local {
decl_internal_rust_fn(ccx, closure_ty, name.as_slice())
} else {
decl_rust_fn(ccx, closure_ty, name.as_slice())
};
ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);
// This is only used by statics inlined from a different crate.
if !is_local {
// Don't regenerate the wrapper, just reuse the original one.
return llfn;
}
let _icx = push_ctxt("closure::get_wrapper_for_bare_fn");
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfn, -1, true, f.sig.output,
&empty_param_substs, None, &arena);
init_function(&fcx, true, f.sig.output);
let bcx = fcx.entry_bcx.borrow().clone().unwrap();
let args = create_datums_for_fn_args(&fcx,
ty::ty_fn_args(closure_ty)
.as_slice());
let mut llargs = Vec::new();
match fcx.llretptr.get() {
Some(llretptr) => {
llargs.push(llretptr);
}
None => {}
}
llargs.extend(args.iter().map(|arg| arg.val));
let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx);
} else {
Ret(bcx, retval);
}
// HACK(eddyb) finish_fn cannot be used here, we returned directly.
debuginfo::clear_source_location(&fcx);
fcx.cleanup();
llfn
}
pub fn make_closure_from_bare_fn<'a>(bcx: &'a Block<'a>,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef)
-> DatumBlock<'a, Expr> {
let scratch = rvalue_scratch_datum(bcx, closure_ty, "__adjust");
let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true);
fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx())));
DatumBlock::new(bcx, scratch.to_expr_datum())
} | match ccx.closure_bare_wrapper_cache.borrow().find(&fn_ptr) {
Some(&llval) => return llval,
None => {}
} | random_line_split |
closure.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use back::abi;
use back::link::mangle_internal_name_by_path_and_seq;
use driver::config::FullDebugInfo;
use lib::llvm::ValueRef;
use middle::def;
use middle::freevars;
use middle::lang_items::ClosureExchangeMallocFnLangItem;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::datum::{Datum, DatumBlock, Expr, Lvalue, rvalue_scratch_datum};
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::machine::llsize_of;
use middle::trans::type_of::*;
use middle::trans::type_::Type;
use middle::ty;
use util::ppaux::Repr;
use util::ppaux::ty_to_str;
use arena::TypedArena;
use syntax::ast;
// ___Good to know (tm)__________________________________________________
//
// The layout of a closure environment in memory is
// roughly as follows:
//
// struct rust_opaque_box { // see rust_internal.h
// unsigned ref_count; // obsolete (part of @T's header)
// fn(void*) *drop_glue; // destructor (for proc)
// rust_opaque_box *prev; // obsolete (part of @T's header)
// rust_opaque_box *next; // obsolete (part of @T's header)
// struct closure_data {
// upvar1_t upvar1;
// ...
// upvarN_t upvarN;
// }
// };
//
// Note that the closure is itself a rust_opaque_box. This is true
// even for ~fn and ||, because we wish to keep binary compatibility
// between all kinds of closures. The allocation strategy for this
// closure depends on the closure type. For a sendfn, the closure
// (and the referenced type descriptors) will be allocated in the
// exchange heap. For a fn, the closure is allocated in the task heap
// and is reference counted. For a block, the closure is allocated on
// the stack.
//
// ## Opaque closures and the embedded type descriptor ##
//
// One interesting part of closures is that they encapsulate the data
// that they close over. So when I have a ptr to a closure, I do not
// know how many type descriptors it contains nor what upvars are
// captured within. That means I do not know precisely how big it is
// nor where its fields are located. This is called an "opaque
// closure".
//
// Typically an opaque closure suffices because we only manipulate it
// by ptr. The routine Type::at_box().ptr_to() returns an appropriate
// type for such an opaque closure; it allows access to the box fields,
// but not the closure_data itself.
//
// But sometimes, such as when cloning or freeing a closure, we need
// to know the full information. That is where the type descriptor
// that defines the closure comes in handy. We can use its take and
// drop glue functions to allocate/free data as needed.
//
// ## Subtleties concerning alignment ##
//
// It is important that we be able to locate the closure data *without
// knowing the kind of data that is being bound*. This can be tricky
// because the alignment requirements of the bound data affect the
// alignment requirements of the closure_data struct as a whole. However,
// right now this is a non-issue in any case, because the size of the
// rust_opaque_box header is always a multiple of 16-bytes, which is
// the maximum alignment requirement we ever have to worry about.
//
// The only reason alignment matters is that, in order to learn what data
// is bound, we would normally first load the type descriptors: but their
// location ultimately depends on their content! There is, however, a
// workaround. We can load the tydesc from the rust_opaque_box, which
// describes the closure_data struct and has self-contained derived type
// descriptors, and read the alignment from there. It's just annoying to
// do. Hopefully should this ever become an issue we'll have monomorphized
// and type descriptors will all be a bad dream.
//
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
pub struct EnvValue {
action: freevars::CaptureMode,
datum: Datum<Lvalue>
}
impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> String {
format!("{}({})", self.action, self.datum.to_str(ccx))
}
}
// Given a closure ty, emits a corresponding tuple ty
pub fn mk_closure_tys(tcx: &ty::ctxt,
bound_values: &[EnvValue])
-> ty::t {
// determine the types of the values in the env. Note that this
// is the actual types that will be stored in the map, not the
// logical types as the user sees them, so by-ref upvars must be
// converted to ptrs.
let bound_tys = bound_values.iter().map(|bv| {
match bv.action {
freevars::CaptureByValue => bv.datum.ty,
freevars::CaptureByRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
}
}).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty;
}
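// Builds a tuple type mirroring the rust_opaque_box layout described above:
// (ref_count, drop_glue, prev, next, closure_data).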
fn tuplify_box_ty(tcx: &ty::ctxt, t: ty::t) -> ty::t {
let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8());
ty::mk_tup(tcx, vec!(ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t))
}
fn allocate_cbox<'a>(bcx: &'a Block<'a>,
store: ty::TraitStore,
cdata_ty: ty::t)
-> Result<'a> {
let _icx = push_ctxt("closure::allocate_cbox");
let tcx = bcx.tcx();
// Allocate and initialize the box:
match store {
ty::UniqTraitStore => {
let ty = type_of(bcx.ccx(), cdata_ty);
let size = llsize_of(bcx.ccx(), ty);
// we treat proc as @ here, which isn't ideal
malloc_raw_dyn_managed(bcx, cdata_ty, ClosureExchangeMallocFnLangItem, size)
}
ty::RegionTraitStore(..) => {
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let llbox = alloc_ty(bcx, cbox_ty, "__closure");
Result::new(bcx, llbox)
}
}
}
pub struct ClosureResult<'a> {
llbox: ValueRef, // llvalue of ptr to closure
cdata_ty: ty::t, // type of the closure data
bcx: &'a Block<'a> // final bcx
}
// Given a block context and a list of tydescs and values to bind
// construct a closure out of them. If copying is true, it is a
// heap allocated closure that copies the upvars into environment.
// Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'a>(
bcx: &'a Block<'a>,
bound_values: Vec<EnvValue>,
store: ty::TraitStore)
-> ClosureResult<'a> {
let _icx = push_ctxt("closure::store_environment");
let ccx = bcx.ccx();
let tcx = ccx.tcx();
// compute the type of the closure
let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
// whatever.
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let cboxptr_ty = ty::mk_ptr(tcx, ty::mt {ty:cbox_ty, mutbl:ast::MutImmutable});
let llboxptr_ty = type_of(ccx, cboxptr_ty);
// If there are no bound values, no point in allocating anything.
if bound_values.is_empty() {
return ClosureResult {llbox: C_null(llboxptr_ty),
cdata_ty: cdata_ty,
bcx: bcx};
}
// allocate closure in the heap
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_str(tcx, cbox_ty));
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for (i, bv) in bound_values.move_iter().enumerate() {
debug!("Copy {} into closure", bv.to_str(ccx));
if ccx.sess().asm_comments() {
add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx)).as_slice());
}
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
match bv.action {
freevars::CaptureByValue => {
bcx = bv.datum.store_to(bcx, bound_data);
}
freevars::CaptureByRef => {
Store(bcx, bv.datum.to_llref(), bound_data);
}
}
}
ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
}
// Given a context and a list of upvars, build a closure. This just
// collects the upvars and packages them up for store_environment.
fn build_closure<'a>(bcx0: &'a Block<'a>,
freevar_mode: freevars::CaptureMode,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> ClosureResult<'a>
{
let _icx = push_ctxt("closure::build_closure");
// If we need to, package up the iterator body to call
let bcx = bcx0;
// Package up the captured upvars
let mut env_vals = Vec::new();
for freevar in freevars.iter() {
let datum = expr::trans_local_var(bcx, freevar.def);
env_vals.push(EnvValue {action: freevar_mode, datum: datum});
}
store_environment(bcx, env_vals, store)
}
// Given an enclosing block context, a new function context, a closure type,
// and a list of upvars, generate code to load and populate the environment
// with the upvars and type descriptors.
fn load_environment<'a>(bcx: &'a Block<'a>,
cdata_ty: ty::t,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> &'a Block<'a> {
let _icx = push_ctxt("closure::load_environment");
// Don't bother to create the block if there's nothing to load
if freevars.len() == 0 {
return bcx;
}
// Load a pointer to the closure data, skipping over the box header:
let llcdata = at_box_body(bcx, cdata_ty, bcx.fcx.llenv.unwrap());
// Store the pointer to closure data in an alloca for debug info because that's what the
// llvm.dbg.declare intrinsic expects
let env_pointer_alloca = if bcx.sess().opts.debuginfo == FullDebugInfo {
let alloc = alloc_ty(bcx, ty::mk_mut_ptr(bcx.tcx(), cdata_ty), "__debuginfo_env_ptr");
Store(bcx, llcdata, alloc);
Some(alloc)
} else {
None
};
// Populate the upvars from the environment
let mut i = 0u;
for freevar in freevars.iter() {
let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
match store {
ty::RegionTraitStore(..) => { upvarptr = Load(bcx, upvarptr); }
ty::UniqTraitStore => {}
}
let def_id = freevar.def.def_id();
bcx.fcx.llupvars.borrow_mut().insert(def_id.node, upvarptr);
for &env_pointer_alloca in env_pointer_alloca.iter() {
debuginfo::create_captured_var_metadata(
bcx,
def_id.node,
cdata_ty,
env_pointer_alloca,
i,
store,
freevar.span);
}
i += 1u;
}
bcx
}
fn fill_fn_pair(bcx: &Block, pair: ValueRef, llfn: ValueRef, llenvptr: ValueRef) {
Store(bcx, llfn, GEPi(bcx, pair, [0u, abi::fn_field_code]));
let llenvptr = PointerCast(bcx, llenvptr, Type::i8p(bcx.ccx()));
Store(bcx, llenvptr, GEPi(bcx, pair, [0u, abi::fn_field_box]));
}
pub fn trans_expr_fn<'a>(
bcx: &'a Block<'a>,
store: ty::TraitStore,
decl: &ast::FnDecl,
body: &ast::Block,
id: ast::NodeId,
dest: expr::Dest)
-> &'a Block<'a> {
/*!
*
* Translates the body of a closure expression.
*
* - `store`
* - `decl`
* - `body`
* - `id`: The id of the closure expression.
* - `cap_clause`: information about captured variables, if any.
* - `dest`: where to write the closure value, which must be a
(fn ptr, env) pair
*/
let _icx = push_ctxt("closure::trans_expr_fn");
let dest_addr = match dest {
expr::SaveIn(p) => p,
expr::Ignore => {
return bcx; // closure construction is non-side-effecting
}
};
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let fty = node_id_type(bcx, id);
let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure")
});
let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice());
// set an inline hint for all closures
set_inline_hint(llfn);
let freevar_mode = freevars::get_capture_mode(tcx, id);
let freevars: Vec<freevars::freevar_entry> =
freevars::with_freevars(
tcx, id,
|fv| fv.iter().map(|&fv| fv).collect());
let ClosureResult {llbox, cdata_ty, bcx} =
build_closure(bcx, freevar_mode, &freevars, store);
trans_closure(ccx, decl, body, llfn,
bcx.fcx.param_substs, id,
[], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, &freevars, store));
fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx
}
pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef,
is_local: bool) -> ValueRef {
let def_id = match def {
def::DefFn(did, _) | def::DefStaticMethod(did, _, _) |
def::DefVariant(_, did, _) | def::DefStruct(did) => did,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a statically resolved fn, got \
{:?}",
def).as_slice());
}
};
match ccx.closure_bare_wrapper_cache.borrow().find(&fn_ptr) {
Some(&llval) => return llval,
None => {}
}
let tcx = ccx.tcx();
debug!("get_wrapper_for_bare_fn(closure_ty={})", closure_ty.repr(tcx));
let f = match ty::get(closure_ty).sty {
ty::ty_closure(ref f) => f,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a closure ty, got {}",
closure_ty.repr(tcx)).as_slice());
}
};
let name = ty::with_path(tcx, def_id, |path| {
mangle_internal_name_by_path_and_seq(path, "as_closure")
});
let llfn = if is_local {
decl_internal_rust_fn(ccx, closure_ty, name.as_slice())
} else {
decl_rust_fn(ccx, closure_ty, name.as_slice())
};
ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);
// This is only used by statics inlined from a different crate.
if !is_local {
// Don't regenerate the wrapper, just reuse the original one.
return llfn;
}
let _icx = push_ctxt("closure::get_wrapper_for_bare_fn");
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfn, -1, true, f.sig.output,
&empty_param_substs, None, &arena);
init_function(&fcx, true, f.sig.output);
let bcx = fcx.entry_bcx.borrow().clone().unwrap();
let args = create_datums_for_fn_args(&fcx,
ty::ty_fn_args(closure_ty)
.as_slice());
let mut llargs = Vec::new();
match fcx.llretptr.get() {
Some(llretptr) => {
llargs.push(llretptr);
}
None => {}
}
llargs.extend(args.iter().map(|arg| arg.val));
let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx);
} else {
Ret(bcx, retval);
}
// HACK(eddyb) finish_fn cannot be used here, we returned directly.
debuginfo::clear_source_location(&fcx);
fcx.cleanup();
llfn
}
pub fn | <'a>(bcx: &'a Block<'a>,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef)
-> DatumBlock<'a, Expr> {
let scratch = rvalue_scratch_datum(bcx, closure_ty, "__adjust");
let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true);
fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx())));
DatumBlock::new(bcx, scratch.to_expr_datum())
}
| make_closure_from_bare_fn | identifier_name |
closure.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use back::abi;
use back::link::mangle_internal_name_by_path_and_seq;
use driver::config::FullDebugInfo;
use lib::llvm::ValueRef;
use middle::def;
use middle::freevars;
use middle::lang_items::ClosureExchangeMallocFnLangItem;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::datum::{Datum, DatumBlock, Expr, Lvalue, rvalue_scratch_datum};
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::machine::llsize_of;
use middle::trans::type_of::*;
use middle::trans::type_::Type;
use middle::ty;
use util::ppaux::Repr;
use util::ppaux::ty_to_str;
use arena::TypedArena;
use syntax::ast;
// ___Good to know (tm)__________________________________________________
//
// The layout of a closure environment in memory is
// roughly as follows:
//
// struct rust_opaque_box { // see rust_internal.h
// unsigned ref_count; // obsolete (part of @T's header)
// fn(void*) *drop_glue; // destructor (for proc)
// rust_opaque_box *prev; // obsolete (part of @T's header)
// rust_opaque_box *next; // obsolete (part of @T's header)
// struct closure_data {
// upvar1_t upvar1;
// ...
// upvarN_t upvarN;
// }
// };
//
// Note that the closure is itself a rust_opaque_box. This is true
// even for ~fn and ||, because we wish to keep binary compatibility
// between all kinds of closures. The allocation strategy for this
// closure depends on the closure type. For a sendfn, the closure
// (and the referenced type descriptors) will be allocated in the
// exchange heap. For a fn, the closure is allocated in the task heap
// and is reference counted. For a block, the closure is allocated on
// the stack.
//
// ## Opaque closures and the embedded type descriptor ##
//
// One interesting part of closures is that they encapsulate the data
// that they close over. So when I have a ptr to a closure, I do not
// know how many type descriptors it contains nor what upvars are
// captured within. That means I do not know precisely how big it is
// nor where its fields are located. This is called an "opaque
// closure".
//
// Typically an opaque closure suffices because we only manipulate it
// by ptr. The routine Type::at_box().ptr_to() returns an appropriate
// type for such an opaque closure; it allows access to the box fields,
// but not the closure_data itself.
//
// But sometimes, such as when cloning or freeing a closure, we need
// to know the full information. That is where the type descriptor
// that defines the closure comes in handy. We can use its take and
// drop glue functions to allocate/free data as needed.
//
// ## Subtleties concerning alignment ##
//
// It is important that we be able to locate the closure data *without
// knowing the kind of data that is being bound*. This can be tricky
// because the alignment requirements of the bound data affect the
// alignment requirements of the closure_data struct as a whole. However,
// right now this is a non-issue in any case, because the size of the
// rust_opaque_box header is always a multiple of 16-bytes, which is
// the maximum alignment requirement we ever have to worry about.
//
// The only reason alignment matters is that, in order to learn what data
// is bound, we would normally first load the type descriptors: but their
// location ultimately depends on their content! There is, however, a
// workaround. We can load the tydesc from the rust_opaque_box, which
// describes the closure_data struct and has self-contained derived type
// descriptors, and read the alignment from there. It's just annoying to
// do. Hopefully should this ever become an issue we'll have monomorphized
// and type descriptors will all be a bad dream.
//
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
pub struct EnvValue {
action: freevars::CaptureMode,
datum: Datum<Lvalue>
}
impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> String {
format!("{}({})", self.action, self.datum.to_str(ccx))
}
}
// Given a closure ty, emits a corresponding tuple ty
pub fn mk_closure_tys(tcx: &ty::ctxt,
bound_values: &[EnvValue])
-> ty::t {
// determine the types of the values in the env. Note that this
// is the actual types that will be stored in the map, not the
// logical types as the user sees them, so by-ref upvars must be
// converted to ptrs.
let bound_tys = bound_values.iter().map(|bv| {
match bv.action {
freevars::CaptureByValue => bv.datum.ty,
freevars::CaptureByRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
}
}).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty;
}
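// Builds a tuple type mirroring the rust_opaque_box layout described above:
// (ref_count, drop_glue, prev, next, closure_data).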
fn tuplify_box_ty(tcx: &ty::ctxt, t: ty::t) -> ty::t {
let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8());
ty::mk_tup(tcx, vec!(ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t))
}
fn allocate_cbox<'a>(bcx: &'a Block<'a>,
store: ty::TraitStore,
cdata_ty: ty::t)
-> Result<'a> {
let _icx = push_ctxt("closure::allocate_cbox");
let tcx = bcx.tcx();
// Allocate and initialize the box:
match store {
ty::UniqTraitStore => {
let ty = type_of(bcx.ccx(), cdata_ty);
let size = llsize_of(bcx.ccx(), ty);
// we treat proc as @ here, which isn't ideal
malloc_raw_dyn_managed(bcx, cdata_ty, ClosureExchangeMallocFnLangItem, size)
}
ty::RegionTraitStore(..) => {
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let llbox = alloc_ty(bcx, cbox_ty, "__closure");
Result::new(bcx, llbox)
}
}
}
pub struct ClosureResult<'a> {
llbox: ValueRef, // llvalue of ptr to closure
cdata_ty: ty::t, // type of the closure data
bcx: &'a Block<'a> // final bcx
}
// Given a block context and a list of tydescs and values to bind
// construct a closure out of them. If copying is true, it is a
// heap allocated closure that copies the upvars into environment.
// Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'a>(
bcx: &'a Block<'a>,
bound_values: Vec<EnvValue>,
store: ty::TraitStore)
-> ClosureResult<'a> {
let _icx = push_ctxt("closure::store_environment");
let ccx = bcx.ccx();
let tcx = ccx.tcx();
// compute the type of the closure
let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
// whatever.
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let cboxptr_ty = ty::mk_ptr(tcx, ty::mt {ty:cbox_ty, mutbl:ast::MutImmutable});
let llboxptr_ty = type_of(ccx, cboxptr_ty);
// If there are no bound values, no point in allocating anything.
if bound_values.is_empty() {
return ClosureResult {llbox: C_null(llboxptr_ty),
cdata_ty: cdata_ty,
bcx: bcx};
}
// allocate closure in the heap
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_str(tcx, cbox_ty));
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for (i, bv) in bound_values.move_iter().enumerate() {
debug!("Copy {} into closure", bv.to_str(ccx));
if ccx.sess().asm_comments() {
add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx)).as_slice());
}
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
match bv.action {
freevars::CaptureByValue => {
bcx = bv.datum.store_to(bcx, bound_data);
}
freevars::CaptureByRef => {
Store(bcx, bv.datum.to_llref(), bound_data);
}
}
}
ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
}
// Given a context and a list of upvars, build a closure. This just
// collects the upvars and packages them up for store_environment.
fn build_closure<'a>(bcx0: &'a Block<'a>,
freevar_mode: freevars::CaptureMode,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> ClosureResult<'a>
{
let _icx = push_ctxt("closure::build_closure");
// If we need to, package up the iterator body to call
let bcx = bcx0;
// Package up the captured upvars
let mut env_vals = Vec::new();
for freevar in freevars.iter() {
let datum = expr::trans_local_var(bcx, freevar.def);
env_vals.push(EnvValue {action: freevar_mode, datum: datum});
}
store_environment(bcx, env_vals, store)
}
// Given an enclosing block context, a new function context, a closure type,
// and a list of upvars, generate code to load and populate the environment
// with the upvars and type descriptors.
fn load_environment<'a>(bcx: &'a Block<'a>,
cdata_ty: ty::t,
freevars: &Vec<freevars::freevar_entry>,
store: ty::TraitStore)
-> &'a Block<'a> {
let _icx = push_ctxt("closure::load_environment");
// Don't bother to create the block if there's nothing to load
if freevars.len() == 0 {
return bcx;
}
// Load a pointer to the closure data, skipping over the box header:
let llcdata = at_box_body(bcx, cdata_ty, bcx.fcx.llenv.unwrap());
// Store the pointer to closure data in an alloca for debug info because that's what the
// llvm.dbg.declare intrinsic expects
let env_pointer_alloca = if bcx.sess().opts.debuginfo == FullDebugInfo {
let alloc = alloc_ty(bcx, ty::mk_mut_ptr(bcx.tcx(), cdata_ty), "__debuginfo_env_ptr");
Store(bcx, llcdata, alloc);
Some(alloc)
} else {
None
};
// Populate the upvars from the environment
let mut i = 0u;
for freevar in freevars.iter() {
let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
match store {
ty::RegionTraitStore(..) => { upvarptr = Load(bcx, upvarptr); }
ty::UniqTraitStore => {}
}
let def_id = freevar.def.def_id();
bcx.fcx.llupvars.borrow_mut().insert(def_id.node, upvarptr);
for &env_pointer_alloca in env_pointer_alloca.iter() {
debuginfo::create_captured_var_metadata(
bcx,
def_id.node,
cdata_ty,
env_pointer_alloca,
i,
store,
freevar.span);
}
i += 1u;
}
bcx
}
fn fill_fn_pair(bcx: &Block, pair: ValueRef, llfn: ValueRef, llenvptr: ValueRef) {
Store(bcx, llfn, GEPi(bcx, pair, [0u, abi::fn_field_code]));
let llenvptr = PointerCast(bcx, llenvptr, Type::i8p(bcx.ccx()));
Store(bcx, llenvptr, GEPi(bcx, pair, [0u, abi::fn_field_box]));
}
pub fn trans_expr_fn<'a>(
bcx: &'a Block<'a>,
store: ty::TraitStore,
decl: &ast::FnDecl,
body: &ast::Block,
id: ast::NodeId,
dest: expr::Dest)
-> &'a Block<'a> {
/*!
*
* Translates the body of a closure expression.
*
     * - `store`: the trait store (by-ref or owned) determining how the
     *   environment is allocated.
     * - `decl`: the declaration of the closure's arguments and return type.
     * - `body`: the body block of the closure expression.
     * - `id`: The id of the closure expression.
* - `dest`: where to write the closure value, which must be a
(fn ptr, env) pair
*/
let _icx = push_ctxt("closure::trans_expr_fn");
let dest_addr = match dest {
expr::SaveIn(p) => p,
expr::Ignore => {
return bcx; // closure construction is non-side-effecting
}
};
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let fty = node_id_type(bcx, id);
let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure")
});
let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice());
// set an inline hint for all closures
set_inline_hint(llfn);
let freevar_mode = freevars::get_capture_mode(tcx, id);
let freevars: Vec<freevars::freevar_entry> =
freevars::with_freevars(
tcx, id,
|fv| fv.iter().map(|&fv| fv).collect());
let ClosureResult {llbox, cdata_ty, bcx} =
build_closure(bcx, freevar_mode, &freevars, store);
trans_closure(ccx, decl, body, llfn,
bcx.fcx.param_substs, id,
[], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, &freevars, store));
fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx
}
pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef,
is_local: bool) -> ValueRef | debug!("get_wrapper_for_bare_fn(closure_ty={})", closure_ty.repr(tcx));
let f = match ty::get(closure_ty).sty {
ty::ty_closure(ref f) => f,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a closure ty, got {}",
closure_ty.repr(tcx)).as_slice());
}
};
let name = ty::with_path(tcx, def_id, |path| {
mangle_internal_name_by_path_and_seq(path, "as_closure")
});
let llfn = if is_local {
decl_internal_rust_fn(ccx, closure_ty, name.as_slice())
} else {
decl_rust_fn(ccx, closure_ty, name.as_slice())
};
ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);
// This is only used by statics inlined from a different crate.
    if !is_local {
// Don't regenerate the wrapper, just reuse the original one.
return llfn;
}
let _icx = push_ctxt("closure::get_wrapper_for_bare_fn");
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfn, -1, true, f.sig.output,
&empty_param_substs, None, &arena);
init_function(&fcx, true, f.sig.output);
let bcx = fcx.entry_bcx.borrow().clone().unwrap();
let args = create_datums_for_fn_args(&fcx,
ty::ty_fn_args(closure_ty)
.as_slice());
let mut llargs = Vec::new();
match fcx.llretptr.get() {
Some(llretptr) => {
llargs.push(llretptr);
}
None => {}
}
llargs.extend(args.iter().map(|arg| arg.val));
let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx);
} else {
Ret(bcx, retval);
}
// HACK(eddyb) finish_fn cannot be used here, we returned directly.
debuginfo::clear_source_location(&fcx);
fcx.cleanup();
llfn
}
pub fn make_closure_from_bare_fn<'a>(bcx: &'a Block<'a>,
closure_ty: ty::t,
def: def::Def,
fn_ptr: ValueRef)
-> DatumBlock<'a, Expr> {
let scratch = rvalue_scratch_datum(bcx, closure_ty, "__adjust");
let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true);
fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx())));
DatumBlock::new(bcx, scratch.to_expr_datum())
}
| {
let def_id = match def {
def::DefFn(did, _) | def::DefStaticMethod(did, _, _) |
def::DefVariant(_, did, _) | def::DefStruct(did) => did,
_ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a statically resolved fn, got \
{:?}",
def).as_slice());
}
};
match ccx.closure_bare_wrapper_cache.borrow().find(&fn_ptr) {
Some(&llval) => return llval,
None => {}
}
let tcx = ccx.tcx();
| identifier_body |
symmetriccipher.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate crypto;
extern crate rand;
use crypto::{ symmetriccipher, buffer, aes, blockmodes };
use crypto::buffer::{ ReadBuffer, WriteBuffer, BufferResult };
use rand::{ Rng, OsRng };
// Encrypt a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
fn encrypt(data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
// Create an encryptor instance of the best performing
// type available for the platform.
let mut encryptor = aes::cbc_encryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
// Each encryption operation encrypts some data from
// an input buffer into an output buffer. Those buffers
    // must be instances of RefReadBuffer and RefWriteBuffer
// (respectively) which keep track of how much data has been
// read from or written to them.
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
// Each encryption operation will "make progress". "Making progress"
// is a bit loosely defined, but basically, at the end of each operation
// either BufferUnderflow or BufferOverflow will be returned (unless
// there was an error). If the return value is BufferUnderflow, it means
// that the operation ended while wanting more input data. If the return
// value is BufferOverflow, it means that the operation ended because it
// needed more space to output data. As long as the next call to the encryption
// operation provides the space that was requested (either more input data
// or more output space), the operation is guaranteed to get closer to
// completing the full operation - ie: "make progress".
//
    // Here, we pass the data to encrypt to the encryptor along with a fixed-size
// output buffer. The 'true' flag indicates that the end of the data that
// is to be encrypted is included in the input buffer (which is true, since
// the input data includes all the data to encrypt). After each call, we copy
// any output data to our result Vec. If we get a BufferOverflow, we keep
// going in the loop since it means that there is more work to do. We can
// complete as soon as we get a BufferUnderflow since the encryptor is telling
// us that it stopped processing data due to not having any more data in the
// input buffer.
loop {
let result = try!(encryptor.encrypt(&mut read_buffer, &mut write_buffer, true));
// "write_buffer.take_read_buffer().take_remaining()" means:
// from the writable buffer, create a new readable buffer which
// contains all data that has been written, and then access all
// of that data as a slice.
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
match result {
BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
}
Ok(final_result)
}
// Decrypts a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
//
// This function is very similar to encrypt(), so, please reference
// comments in that function. In non-example code, if desired, it is possible to
// share much of the implementation using closures to hide the operation
// being performed. However, such code would make this example less clear.
fn decrypt(encrypted_data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
let mut decryptor = aes::cbc_decryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(encrypted_data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
loop {
let result = try!(decryptor.decrypt(&mut read_buffer, &mut write_buffer, true));
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
match result {
BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
}
Ok(final_result)
}
fn | () {
let message = "Hello World!";
let mut key: [u8; 32] = [0; 32];
let mut iv: [u8; 16] = [0; 16];
// In a real program, the key and iv may be determined
// using some other mechanism. If a password is to be used
// as a key, an algorithm like PBKDF2, Bcrypt, or Scrypt (all
// supported by Rust-Crypto!) would be a good choice to derive
    // a key from the password. For the purposes of this example, the key and
// iv are just random values.
let mut rng = OsRng::new().ok().unwrap();
rng.fill_bytes(&mut key);
rng.fill_bytes(&mut iv);
let encrypted_data = encrypt(message.as_bytes(), &key, &iv).ok().unwrap();
let decrypted_data = decrypt(&encrypted_data[..], &key, &iv).ok().unwrap();
assert!(message.as_bytes() == &decrypted_data[..]);
}
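// Hedged sketch, not part of the original example: one way to derive the AES key
// from a password with PBKDF2, as the comment in main() suggests. The salt
// handling and the iteration count are illustrative choices only.
fn derive_key_from_password(password: &str, salt: &[u8]) -> [u8; 32] {
    use crypto::hmac::Hmac;
    use crypto::pbkdf2::pbkdf2;
    use crypto::sha2::Sha256;
    let mut mac = Hmac::new(Sha256::new(), password.as_bytes());
    let mut key = [0u8; 32];
    pbkdf2(&mut mac, salt, 10_000, &mut key);
    key
}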
| main | identifier_name |
symmetriccipher.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate crypto;
extern crate rand;
use crypto::{ symmetriccipher, buffer, aes, blockmodes };
use crypto::buffer::{ ReadBuffer, WriteBuffer, BufferResult };
use rand::{ Rng, OsRng };
// Encrypt a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
fn encrypt(data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
// Create an encryptor instance of the best performing
// type available for the platform.
let mut encryptor = aes::cbc_encryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
// Each encryption operation encrypts some data from
// an input buffer into an output buffer. Those buffers
    // must be instances of RefReadBuffer and RefWriteBuffer
// (respectively) which keep track of how much data has been
// read from or written to them.
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
// Each encryption operation will "make progress". "Making progress"
// is a bit loosely defined, but basically, at the end of each operation
// either BufferUnderflow or BufferOverflow will be returned (unless
// there was an error). If the return value is BufferUnderflow, it means
// that the operation ended while wanting more input data. If the return
// value is BufferOverflow, it means that the operation ended because it
// needed more space to output data. As long as the next call to the encryption
// operation provides the space that was requested (either more input data
// or more output space), the operation is guaranteed to get closer to
// completing the full operation - ie: "make progress".
//
    // Here, we pass the data to encrypt to the encryptor along with a fixed-size
// output buffer. The 'true' flag indicates that the end of the data that
// is to be encrypted is included in the input buffer (which is true, since
// the input data includes all the data to encrypt). After each call, we copy
// any output data to our result Vec. If we get a BufferOverflow, we keep
// going in the loop since it means that there is more work to do. We can
// complete as soon as we get a BufferUnderflow since the encryptor is telling
// us that it stopped processing data due to not having any more data in the
// input buffer.
loop {
let result = try!(encryptor.encrypt(&mut read_buffer, &mut write_buffer, true));
// "write_buffer.take_read_buffer().take_remaining()" means:
// from the writable buffer, create a new readable buffer which
// contains all data that has been written, and then access all
// of that data as a slice.
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
| BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
}
Ok(final_result)
}
// Decrypts a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
//
// This function is very similar to encrypt(), so, please reference
// comments in that function. In non-example code, if desired, it is possible to
// share much of the implementation using closures to hide the operation
// being performed. However, such code would make this example less clear.
fn decrypt(encrypted_data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
let mut decryptor = aes::cbc_decryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(encrypted_data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
loop {
let result = try!(decryptor.decrypt(&mut read_buffer, &mut write_buffer, true));
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
match result {
BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
}
Ok(final_result)
}
fn main() {
let message = "Hello World!";
let mut key: [u8; 32] = [0; 32];
let mut iv: [u8; 16] = [0; 16];
// In a real program, the key and iv may be determined
// using some other mechanism. If a password is to be used
// as a key, an algorithm like PBKDF2, Bcrypt, or Scrypt (all
// supported by Rust-Crypto!) would be a good choice to derive
    // a key from the password. For the purposes of this example, the key and
// iv are just random values.
let mut rng = OsRng::new().ok().unwrap();
rng.fill_bytes(&mut key);
rng.fill_bytes(&mut iv);
let encrypted_data = encrypt(message.as_bytes(), &key, &iv).ok().unwrap();
let decrypted_data = decrypt(&encrypted_data[..], &key, &iv).ok().unwrap();
assert!(message.as_bytes() == &decrypted_data[..]);
} | match result { | random_line_split |
symmetriccipher.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate crypto;
extern crate rand;
use crypto::{ symmetriccipher, buffer, aes, blockmodes };
use crypto::buffer::{ ReadBuffer, WriteBuffer, BufferResult };
use rand::{ Rng, OsRng };
// Encrypt a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
fn encrypt(data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
// Create an encryptor instance of the best performing
// type available for the platform.
let mut encryptor = aes::cbc_encryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
// Each encryption operation encrypts some data from
// an input buffer into an output buffer. Those buffers
    // must be instances of RefReadBuffer and RefWriteBuffer
// (respectively) which keep track of how much data has been
// read from or written to them.
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
// Each encryption operation will "make progress". "Making progress"
// is a bit loosely defined, but basically, at the end of each operation
// either BufferUnderflow or BufferOverflow will be returned (unless
// there was an error). If the return value is BufferUnderflow, it means
// that the operation ended while wanting more input data. If the return
// value is BufferOverflow, it means that the operation ended because it
// needed more space to output data. As long as the next call to the encryption
// operation provides the space that was requested (either more input data
// or more output space), the operation is guaranteed to get closer to
// completing the full operation - ie: "make progress".
//
    // Here, we pass the data to encrypt to the encryptor along with a fixed-size
// output buffer. The 'true' flag indicates that the end of the data that
// is to be encrypted is included in the input buffer (which is true, since
// the input data includes all the data to encrypt). After each call, we copy
// any output data to our result Vec. If we get a BufferOverflow, we keep
// going in the loop since it means that there is more work to do. We can
// complete as soon as we get a BufferUnderflow since the encryptor is telling
// us that it stopped processing data due to not having any more data in the
// input buffer.
loop {
let result = try!(encryptor.encrypt(&mut read_buffer, &mut write_buffer, true));
// "write_buffer.take_read_buffer().take_remaining()" means:
// from the writable buffer, create a new readable buffer which
// contains all data that has been written, and then access all
// of that data as a slice.
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
match result {
BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
}
Ok(final_result)
}
// Decrypts a buffer with the given key and iv using
// AES-256/CBC/Pkcs encryption.
//
// This function is very similar to encrypt(), so, please reference
// comments in that function. In non-example code, if desired, it is possible to
// share much of the implementation using closures to hide the operation
// being performed. However, such code would make this example less clear.
fn decrypt(encrypted_data: &[u8], key: &[u8], iv: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> |
Ok(final_result)
}
fn main() {
let message = "Hello World!";
let mut key: [u8; 32] = [0; 32];
let mut iv: [u8; 16] = [0; 16];
// In a real program, the key and iv may be determined
// using some other mechanism. If a password is to be used
// as a key, an algorithm like PBKDF2, Bcrypt, or Scrypt (all
// supported by Rust-Crypto!) would be a good choice to derive
    // a key from the password. For the purposes of this example, the key and
// iv are just random values.
let mut rng = OsRng::new().ok().unwrap();
rng.fill_bytes(&mut key);
rng.fill_bytes(&mut iv);
let encrypted_data = encrypt(message.as_bytes(), &key, &iv).ok().unwrap();
let decrypted_data = decrypt(&encrypted_data[..], &key, &iv).ok().unwrap();
assert!(message.as_bytes() == &decrypted_data[..]);
}
| {
let mut decryptor = aes::cbc_decryptor(
aes::KeySize::KeySize256,
key,
iv,
blockmodes::PkcsPadding);
let mut final_result = Vec::<u8>::new();
let mut read_buffer = buffer::RefReadBuffer::new(encrypted_data);
let mut buffer = [0; 4096];
let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
loop {
let result = try!(decryptor.decrypt(&mut read_buffer, &mut write_buffer, true));
final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
match result {
BufferResult::BufferUnderflow => break,
BufferResult::BufferOverflow => { }
}
} | identifier_body |
webdriver_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(missing_docs)]
use cookie::Cookie;
use euclid::default::Rect;
use hyper_serde::Serde;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::BrowsingContextId;
use servo_url::ServoUrl;
use std::collections::HashMap; | pub enum WebDriverScriptCommand {
AddCookie(
#[serde(
deserialize_with = "::hyper_serde::deserialize",
serialize_with = "::hyper_serde::serialize"
)]
Cookie<'static>,
IpcSender<Result<(), WebDriverCookieError>>,
),
DeleteCookies(IpcSender<Result<(), ErrorStatus>>),
ExecuteScript(String, IpcSender<WebDriverJSResult>),
ExecuteAsyncScript(String, IpcSender<WebDriverJSResult>),
FindElementCSS(String, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementLinkText(String, bool, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementTagName(String, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementsCSS(String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementsLinkText(String, bool, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementsTagName(String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementElementCSS(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementLinkText(
String,
String,
bool,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementTagName(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementsCSS(String, String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementElementsLinkText(
String,
String,
bool,
IpcSender<Result<Vec<String>, ErrorStatus>>,
),
FindElementElementsTagName(String, String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FocusElement(String, IpcSender<Result<(), ErrorStatus>>),
ElementClick(String, IpcSender<Result<Option<String>, ErrorStatus>>),
GetActiveElement(IpcSender<Option<String>>),
GetCookie(String, IpcSender<Vec<Serde<Cookie<'static>>>>),
GetCookies(IpcSender<Vec<Serde<Cookie<'static>>>>),
GetElementAttribute(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
GetElementProperty(
String,
String,
IpcSender<Result<WebDriverJSValue, ErrorStatus>>,
),
GetElementCSS(String, String, IpcSender<Result<String, ErrorStatus>>),
GetElementRect(String, IpcSender<Result<Rect<f64>, ErrorStatus>>),
GetElementTagName(String, IpcSender<Result<String, ErrorStatus>>),
GetElementText(String, IpcSender<Result<String, ErrorStatus>>),
GetElementInViewCenterPoint(String, IpcSender<Result<Option<(i64, i64)>, ErrorStatus>>),
GetBoundingClientRect(String, IpcSender<Result<Rect<f32>, ErrorStatus>>),
GetBrowsingContextId(
WebDriverFrameId,
IpcSender<Result<BrowsingContextId, ErrorStatus>>,
),
GetUrl(IpcSender<ServoUrl>),
GetPageSource(IpcSender<Result<String, ErrorStatus>>),
IsEnabled(String, IpcSender<Result<bool, ErrorStatus>>),
IsSelected(String, IpcSender<Result<bool, ErrorStatus>>),
GetTitle(IpcSender<String>),
}
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverCookieError {
InvalidDomain,
UnableToSetCookie,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum WebDriverJSValue {
Undefined,
Null,
Boolean(bool),
Number(f64),
String(String),
Element(WebElement),
Frame(WebFrame),
Window(WebWindow),
ArrayLike(Vec<WebDriverJSValue>),
Object(HashMap<String, WebDriverJSValue>),
}
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverJSError {
    /// Occurs when the handler receives an event message for a layout channel that is not
/// associated with the current script thread
BrowsingContextNotFound,
JSError,
StaleElementReference,
Timeout,
UnknownType,
}
pub type WebDriverJSResult = Result<WebDriverJSValue, WebDriverJSError>;
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverFrameId {
Short(u16),
Element(String),
Parent,
}
#[derive(Debug, Deserialize, Serialize)]
pub enum LoadStatus {
LoadComplete,
LoadTimeout,
} | use webdriver::common::{WebElement, WebFrame, WebWindow};
use webdriver::error::ErrorStatus;
#[derive(Debug, Deserialize, Serialize)] | random_line_split |
webdriver_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(missing_docs)]
use cookie::Cookie;
use euclid::default::Rect;
use hyper_serde::Serde;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::BrowsingContextId;
use servo_url::ServoUrl;
use std::collections::HashMap;
use webdriver::common::{WebElement, WebFrame, WebWindow};
use webdriver::error::ErrorStatus;
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverScriptCommand {
AddCookie(
#[serde(
deserialize_with = "::hyper_serde::deserialize",
serialize_with = "::hyper_serde::serialize"
)]
Cookie<'static>,
IpcSender<Result<(), WebDriverCookieError>>,
),
DeleteCookies(IpcSender<Result<(), ErrorStatus>>),
ExecuteScript(String, IpcSender<WebDriverJSResult>),
ExecuteAsyncScript(String, IpcSender<WebDriverJSResult>),
FindElementCSS(String, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementLinkText(String, bool, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementTagName(String, IpcSender<Result<Option<String>, ErrorStatus>>),
FindElementsCSS(String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementsLinkText(String, bool, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementsTagName(String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementElementCSS(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementLinkText(
String,
String,
bool,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementTagName(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
FindElementElementsCSS(String, String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FindElementElementsLinkText(
String,
String,
bool,
IpcSender<Result<Vec<String>, ErrorStatus>>,
),
FindElementElementsTagName(String, String, IpcSender<Result<Vec<String>, ErrorStatus>>),
FocusElement(String, IpcSender<Result<(), ErrorStatus>>),
ElementClick(String, IpcSender<Result<Option<String>, ErrorStatus>>),
GetActiveElement(IpcSender<Option<String>>),
GetCookie(String, IpcSender<Vec<Serde<Cookie<'static>>>>),
GetCookies(IpcSender<Vec<Serde<Cookie<'static>>>>),
GetElementAttribute(
String,
String,
IpcSender<Result<Option<String>, ErrorStatus>>,
),
GetElementProperty(
String,
String,
IpcSender<Result<WebDriverJSValue, ErrorStatus>>,
),
GetElementCSS(String, String, IpcSender<Result<String, ErrorStatus>>),
GetElementRect(String, IpcSender<Result<Rect<f64>, ErrorStatus>>),
GetElementTagName(String, IpcSender<Result<String, ErrorStatus>>),
GetElementText(String, IpcSender<Result<String, ErrorStatus>>),
GetElementInViewCenterPoint(String, IpcSender<Result<Option<(i64, i64)>, ErrorStatus>>),
GetBoundingClientRect(String, IpcSender<Result<Rect<f32>, ErrorStatus>>),
GetBrowsingContextId(
WebDriverFrameId,
IpcSender<Result<BrowsingContextId, ErrorStatus>>,
),
GetUrl(IpcSender<ServoUrl>),
GetPageSource(IpcSender<Result<String, ErrorStatus>>),
IsEnabled(String, IpcSender<Result<bool, ErrorStatus>>),
IsSelected(String, IpcSender<Result<bool, ErrorStatus>>),
GetTitle(IpcSender<String>),
}
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverCookieError {
InvalidDomain,
UnableToSetCookie,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum WebDriverJSValue {
Undefined,
Null,
Boolean(bool),
Number(f64),
String(String),
Element(WebElement),
Frame(WebFrame),
Window(WebWindow),
ArrayLike(Vec<WebDriverJSValue>),
Object(HashMap<String, WebDriverJSValue>),
}
#[derive(Debug, Deserialize, Serialize)]
pub enum WebDriverJSError {
    /// Occurs when the handler receives an event message for a layout channel that is not
/// associated with the current script thread
BrowsingContextNotFound,
JSError,
StaleElementReference,
Timeout,
UnknownType,
}
pub type WebDriverJSResult = Result<WebDriverJSValue, WebDriverJSError>;
#[derive(Debug, Deserialize, Serialize)]
pub enum | {
Short(u16),
Element(String),
Parent,
}
#[derive(Debug, Deserialize, Serialize)]
pub enum LoadStatus {
LoadComplete,
LoadTimeout,
}
| WebDriverFrameId | identifier_name |
clickable_cell_renderer.rs | /*
* niepce - npc-fwk/toolkit/clickable_cell_renderer.rs | *
* Copyright (C) 2020 Hubert Figuière
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/// Trait for getting clicks from a cell renderer.
/// This is used to work around some bug in Gtk.
pub trait ClickableCellRenderer {
fn hit(&mut self, x: i32, y: i32);
fn x(&self) -> i32;
fn y(&self) -> i32;
fn is_hit(&self) -> bool;
fn reset_hit(&mut self);
} | random_line_split |
|
borrowck-forbid-static-unsafe-interior.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verify that it is not possible to take the address of
// static items with unsafe interior.
use std::kinds::marker;
use std::ty::Unsafe;
struct MyUnsafe<T> {
value: Unsafe<T>
}
impl<T> MyUnsafe<T> {
fn forbidden(&self) {}
}
enum | <T> {
VariantSafe,
VariantUnsafe(Unsafe<T>)
}
static STATIC1: UnsafeEnum<int> = VariantSafe;
static STATIC2: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
static STATIC3: MyUnsafe<int> = MyUnsafe{value: STATIC2};
static STATIC4: &'static Unsafe<int> = &STATIC2;
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
struct Wrap<T> {
value: T
}
static UNSAFE: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
static WRAPPED_UNSAFE: Wrap<&'static Unsafe<int>> = Wrap { value: &UNSAFE };
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
fn main() {
let a = &STATIC1;
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
STATIC3.forbidden()
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
}
| UnsafeEnum | identifier_name |
borrowck-forbid-static-unsafe-interior.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verify that it is not possible to take the address of
// static items with unsafe interior.
use std::kinds::marker;
use std::ty::Unsafe;
struct MyUnsafe<T> {
value: Unsafe<T>
}
impl<T> MyUnsafe<T> {
fn forbidden(&self) {}
}
enum UnsafeEnum<T> {
VariantSafe,
VariantUnsafe(Unsafe<T>)
}
static STATIC1: UnsafeEnum<int> = VariantSafe;
static STATIC2: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
static STATIC3: MyUnsafe<int> = MyUnsafe{value: STATIC2};
static STATIC4: &'static Unsafe<int> = &STATIC2;
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
struct Wrap<T> {
value: T
}
|
fn main() {
let a = &STATIC1;
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
STATIC3.forbidden()
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed
} | static UNSAFE: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
static WRAPPED_UNSAFE: Wrap<&'static Unsafe<int>> = Wrap { value: &UNSAFE };
//~^ ERROR borrow of immutable static items with unsafe interior is not allowed | random_line_split |
lib.rs | use std::collections::{HashMap, HashSet};
use elasticlunr::{Index, Language};
use lazy_static::lazy_static;
use config::Config;
use errors::{bail, Result};
use library::{Library, Section};
pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");
lazy_static! {
static ref AMMONIA: ammonia::Builder<'static> = {
let mut clean_content = HashSet::new();
clean_content.insert("script");
clean_content.insert("style");
let mut builder = ammonia::Builder::new();
builder
.tags(HashSet::new())
.tag_attributes(HashMap::new())
.generic_attributes(HashSet::new())
.link_rel(None)
.allowed_classes(HashMap::new())
.clean_content_tags(clean_content);
builder
};
}
fn build_fields(config: &Config) -> Vec<String> {
let mut fields = vec![];
if config.search.include_title {
fields.push("title".to_owned());
}
if config.search.include_description {
fields.push("description".to_owned());
}
if config.search.include_content {
fields.push("body".to_owned());
}
fields
}
fn fill_index(
config: &Config,
title: &Option<String>,
description: &Option<String>,
content: &str,
) -> Vec<String> {
let mut row = vec![];
if config.search.include_title {
row.push(title.clone().unwrap_or_default());
}
if config.search.include_description {
row.push(description.clone().unwrap_or_default());
}
if config.search.include_content {
let body = AMMONIA.clean(&content).to_string();
if let Some(truncate_len) = config.search.truncate_content_length {
// Not great for unicode
// TODO: fix it like the truncate in Tera
match body.char_indices().nth(truncate_len) {
None => row.push(body),
Some((idx, _)) => row.push((&body[..idx]).to_string()),
};
} else {
row.push(body);
};
}
row
}
/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result<String> {
let language = match Language::from_code(lang) {
Some(l) => l,
None => {
bail!("Tried to build search index for language {} which is not supported", lang);
}
};
let mut index = Index::with_language(language, &build_fields(&config));
for section in library.sections_values() {
if section.lang == lang {
add_section_to_index(&mut index, section, library, config);
}
}
Ok(index.to_json())
}
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library, config: &Config) {
    if !section.meta.in_search_index {
return;
}
// Don't index redirecting sections
if section.meta.redirect_to.is_none() {
index.add_doc(
            &section.permalink,
            &fill_index(config, &section.meta.title, &section.meta.description, &section.content),
);
}
for key in §ion.pages {
let page = library.get_page_by_key(*key);
        if !page.meta.in_search_index {
continue;
}
index.add_doc(
&page.permalink, | }
}
#[cfg(test)]
mod tests {
use super::*;
use config::Config;
#[test]
fn can_build_fields() {
let mut config = Config::default();
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "body"]);
config.search.include_content = false;
config.search.include_description = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description"]);
config.search.include_content = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description", "body"]);
config.search.include_title = false;
let fields = build_fields(&config);
assert_eq!(fields, vec!["description", "body"]);
}
#[test]
fn can_fill_index_default() {
let config = Config::default();
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content);
}
#[test]
fn can_fill_index_description() {
let mut config = Config::default();
config.search.include_description = true;
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 3);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], description.unwrap());
assert_eq!(res[2], content);
}
#[test]
fn can_fill_index_truncated_content() {
let mut config = Config::default();
config.search.truncate_content_length = Some(5);
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content[..5]);
}
} | &fill_index(config, &page.meta.title, &page.meta.description, &page.content),
); | random_line_split |
lib.rs | use std::collections::{HashMap, HashSet};
use elasticlunr::{Index, Language};
use lazy_static::lazy_static;
use config::Config;
use errors::{bail, Result};
use library::{Library, Section};
pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");
lazy_static! {
static ref AMMONIA: ammonia::Builder<'static> = {
let mut clean_content = HashSet::new();
clean_content.insert("script");
clean_content.insert("style");
let mut builder = ammonia::Builder::new();
builder
.tags(HashSet::new())
.tag_attributes(HashMap::new())
.generic_attributes(HashSet::new())
.link_rel(None)
.allowed_classes(HashMap::new())
.clean_content_tags(clean_content);
builder
};
}
fn build_fields(config: &Config) -> Vec<String> {
let mut fields = vec![];
if config.search.include_title {
fields.push("title".to_owned());
}
if config.search.include_description {
fields.push("description".to_owned());
}
if config.search.include_content {
fields.push("body".to_owned());
}
fields
}
fn fill_index(
config: &Config,
title: &Option<String>,
description: &Option<String>,
content: &str,
) -> Vec<String> {
let mut row = vec![];
if config.search.include_title {
row.push(title.clone().unwrap_or_default());
}
if config.search.include_description {
row.push(description.clone().unwrap_or_default());
}
if config.search.include_content {
let body = AMMONIA.clean(&content).to_string();
if let Some(truncate_len) = config.search.truncate_content_length {
// Not great for unicode
// TODO: fix it like the truncate in Tera
match body.char_indices().nth(truncate_len) {
None => row.push(body),
Some((idx, _)) => row.push((&body[..idx]).to_string()),
};
} else {
row.push(body);
};
}
row
}
/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result<String> {
let language = match Language::from_code(lang) {
Some(l) => l,
None => {
bail!("Tried to build search index for language {} which is not supported", lang);
}
};
let mut index = Index::with_language(language, &build_fields(&config));
for section in library.sections_values() {
if section.lang == lang {
add_section_to_index(&mut index, section, library, config);
}
}
Ok(index.to_json())
}
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library, config: &Config) {
    if !section.meta.in_search_index {
return;
}
// Don't index redirecting sections
if section.meta.redirect_to.is_none() {
index.add_doc(
            &section.permalink,
            &fill_index(config, &section.meta.title, &section.meta.description, &section.content),
);
}
for key in §ion.pages {
let page = library.get_page_by_key(*key);
        if !page.meta.in_search_index {
continue;
}
index.add_doc(
&page.permalink,
&fill_index(config, &page.meta.title, &page.meta.description, &page.content),
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use config::Config;
#[test]
fn can_build_fields() {
let mut config = Config::default();
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "body"]);
config.search.include_content = false;
config.search.include_description = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description"]);
config.search.include_content = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description", "body"]);
config.search.include_title = false;
let fields = build_fields(&config);
assert_eq!(fields, vec!["description", "body"]);
}
#[test]
fn can_fill_index_default() |
#[test]
fn can_fill_index_description() {
let mut config = Config::default();
config.search.include_description = true;
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 3);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], description.unwrap());
assert_eq!(res[2], content);
}
#[test]
fn can_fill_index_truncated_content() {
let mut config = Config::default();
config.search.truncate_content_length = Some(5);
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content[..5]);
}
}
| {
let config = Config::default();
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content);
} | identifier_body |
lib.rs | use std::collections::{HashMap, HashSet};
use elasticlunr::{Index, Language};
use lazy_static::lazy_static;
use config::Config;
use errors::{bail, Result};
use library::{Library, Section};
pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");
lazy_static! {
static ref AMMONIA: ammonia::Builder<'static> = {
let mut clean_content = HashSet::new();
clean_content.insert("script");
clean_content.insert("style");
let mut builder = ammonia::Builder::new();
builder
.tags(HashSet::new())
.tag_attributes(HashMap::new())
.generic_attributes(HashSet::new())
.link_rel(None)
.allowed_classes(HashMap::new())
.clean_content_tags(clean_content);
builder
};
}
fn | (config: &Config) -> Vec<String> {
let mut fields = vec![];
if config.search.include_title {
fields.push("title".to_owned());
}
if config.search.include_description {
fields.push("description".to_owned());
}
if config.search.include_content {
fields.push("body".to_owned());
}
fields
}
fn fill_index(
config: &Config,
title: &Option<String>,
description: &Option<String>,
content: &str,
) -> Vec<String> {
let mut row = vec![];
if config.search.include_title {
row.push(title.clone().unwrap_or_default());
}
if config.search.include_description {
row.push(description.clone().unwrap_or_default());
}
if config.search.include_content {
let body = AMMONIA.clean(&content).to_string();
if let Some(truncate_len) = config.search.truncate_content_length {
// Not great for unicode
// TODO: fix it like the truncate in Tera
match body.char_indices().nth(truncate_len) {
None => row.push(body),
Some((idx, _)) => row.push((&body[..idx]).to_string()),
};
} else {
row.push(body);
};
}
row
}
/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result<String> {
let language = match Language::from_code(lang) {
Some(l) => l,
None => {
bail!("Tried to build search index for language {} which is not supported", lang);
}
};
let mut index = Index::with_language(language, &build_fields(&config));
for section in library.sections_values() {
if section.lang == lang {
add_section_to_index(&mut index, section, library, config);
}
}
Ok(index.to_json())
}
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library, config: &Config) {
    if !section.meta.in_search_index {
return;
}
// Don't index redirecting sections
if section.meta.redirect_to.is_none() {
index.add_doc(
            &section.permalink,
            &fill_index(config, &section.meta.title, &section.meta.description, &section.content),
);
}
for key in §ion.pages {
let page = library.get_page_by_key(*key);
        if !page.meta.in_search_index {
continue;
}
index.add_doc(
&page.permalink,
&fill_index(config, &page.meta.title, &page.meta.description, &page.content),
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use config::Config;
#[test]
fn can_build_fields() {
let mut config = Config::default();
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "body"]);
config.search.include_content = false;
config.search.include_description = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description"]);
config.search.include_content = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description", "body"]);
config.search.include_title = false;
let fields = build_fields(&config);
assert_eq!(fields, vec!["description", "body"]);
}
#[test]
fn can_fill_index_default() {
let config = Config::default();
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content);
}
#[test]
fn can_fill_index_description() {
let mut config = Config::default();
config.search.include_description = true;
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 3);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], description.unwrap());
assert_eq!(res[2], content);
}
#[test]
fn can_fill_index_truncated_content() {
let mut config = Config::default();
config.search.truncate_content_length = Some(5);
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content[..5]);
}
}
| build_fields | identifier_name |
lib.rs | use std::collections::{HashMap, HashSet};
use elasticlunr::{Index, Language};
use lazy_static::lazy_static;
use config::Config;
use errors::{bail, Result};
use library::{Library, Section};
pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");
lazy_static! {
static ref AMMONIA: ammonia::Builder<'static> = {
let mut clean_content = HashSet::new();
clean_content.insert("script");
clean_content.insert("style");
let mut builder = ammonia::Builder::new();
builder
.tags(HashSet::new())
.tag_attributes(HashMap::new())
.generic_attributes(HashSet::new())
.link_rel(None)
.allowed_classes(HashMap::new())
.clean_content_tags(clean_content);
builder
};
}
fn build_fields(config: &Config) -> Vec<String> {
let mut fields = vec![];
if config.search.include_title {
fields.push("title".to_owned());
}
if config.search.include_description |
if config.search.include_content {
fields.push("body".to_owned());
}
fields
}
fn fill_index(
config: &Config,
title: &Option<String>,
description: &Option<String>,
content: &str,
) -> Vec<String> {
let mut row = vec![];
if config.search.include_title {
row.push(title.clone().unwrap_or_default());
}
if config.search.include_description {
row.push(description.clone().unwrap_or_default());
}
if config.search.include_content {
let body = AMMONIA.clean(&content).to_string();
if let Some(truncate_len) = config.search.truncate_content_length {
// Not great for unicode
// TODO: fix it like the truncate in Tera
match body.char_indices().nth(truncate_len) {
None => row.push(body),
Some((idx, _)) => row.push((&body[..idx]).to_string()),
};
} else {
row.push(body);
};
}
row
}
/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result<String> {
let language = match Language::from_code(lang) {
Some(l) => l,
None => {
bail!("Tried to build search index for language {} which is not supported", lang);
}
};
let mut index = Index::with_language(language, &build_fields(&config));
for section in library.sections_values() {
if section.lang == lang {
add_section_to_index(&mut index, section, library, config);
}
}
Ok(index.to_json())
}
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library, config: &Config) {
    if !section.meta.in_search_index {
return;
}
// Don't index redirecting sections
if section.meta.redirect_to.is_none() {
index.add_doc(
            &section.permalink,
            &fill_index(config, &section.meta.title, &section.meta.description, &section.content),
);
}
for key in §ion.pages {
let page = library.get_page_by_key(*key);
        if !page.meta.in_search_index {
continue;
}
index.add_doc(
&page.permalink,
&fill_index(config, &page.meta.title, &page.meta.description, &page.content),
);
}
}
#[cfg(test)]
mod tests {
use super::*;
use config::Config;
#[test]
fn can_build_fields() {
let mut config = Config::default();
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "body"]);
config.search.include_content = false;
config.search.include_description = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description"]);
config.search.include_content = true;
let fields = build_fields(&config);
assert_eq!(fields, vec!["title", "description", "body"]);
config.search.include_title = false;
let fields = build_fields(&config);
assert_eq!(fields, vec!["description", "body"]);
}
#[test]
fn can_fill_index_default() {
let config = Config::default();
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content);
}
#[test]
fn can_fill_index_description() {
let mut config = Config::default();
config.search.include_description = true;
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 3);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], description.unwrap());
assert_eq!(res[2], content);
}
#[test]
fn can_fill_index_truncated_content() {
let mut config = Config::default();
config.search.truncate_content_length = Some(5);
let title = Some("A title".to_string());
let description = Some("A description".to_string());
let content = "Some content".to_string();
let res = fill_index(&config, &title, &description, &content);
assert_eq!(res.len(), 2);
assert_eq!(res[0], title.unwrap());
assert_eq!(res[1], content[..5]);
}
}
| {
fields.push("description".to_owned());
} | conditional_block |
mod.rs | // OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | // SOFTWARE.
mod component;
pub mod resource;
pub mod render_system;
pub mod system;
mod world;
pub use self::component::*;
pub use self::world::{SystemGroup, WorldPlanner, create_world_planner}; | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | random_line_split |
call.rs | use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Instant;
use prost::Message;
/// A remote procedure call.
///
/// `Call` describes a remote procedure call: the remote service, the method, the required feature
/// flags, the deadline, the request, and the response type. `Call` instances are dispatched to a
/// remote server using `Proxy::send`, which returns the response future.
pub struct Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
pub(crate) service: &'static str,
pub(crate) method: &'static str,
pub(crate) required_feature_flags: &'static [u32],
pub(crate) deadline: Instant,
pub request: Arc<Req>,
_marker: PhantomData<Resp>, | }
impl<Req, Resp> Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
/// Creates a new `Call` instance.
pub fn new(
service: &'static str,
method: &'static str,
request: Arc<Req>,
deadline: Instant,
) -> Call<Req, Resp> {
Call {
service,
method,
required_feature_flags: &[],
deadline,
request,
_marker: PhantomData::default(),
}
}
/// Returns the call's remote service.
pub fn service(&self) -> &'static str {
self.service
}
/// Returns the call's remote method.
pub fn method(&self) -> &'static str {
self.method
}
/// Returns the call's deadline.
pub fn deadline(&self) -> Instant {
self.deadline
}
/// Retrieves the required feature flags of the call.
pub fn required_feature_flags(&self) -> &'static [u32] {
self.required_feature_flags
}
pub fn set_deadline(&mut self, deadline: Instant) -> &mut Call<Req, Resp> {
self.deadline = deadline;
self
}
/// Sets the required feature flags of the call.
///
/// If not set, no feature flags are sent with the call.
pub fn set_required_feature_flags(
&mut self,
required_feature_flags: &'static [u32],
) -> &mut Call<Req, Resp> {
self.required_feature_flags = required_feature_flags;
self
}
}
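// Hedged usage sketch, not part of this module: the service and method names are
// made-up placeholders and the 30-second deadline is an arbitrary illustrative
// choice; dispatching the returned value is left to `Proxy::send`.
fn example_call<Req, Resp>(request: Arc<Req>) -> Call<Req, Resp>
where
    Req: Message + 'static,
    Resp: Message + Default,
{
    use std::time::Duration;
    let mut call = Call::new(
        "example.ExampleService",
        "Ping",
        request,
        Instant::now() + Duration::from_secs(30),
    );
    call.set_required_feature_flags(&[]);
    call
}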
impl<Req, Resp> fmt::Debug for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut dbg = f.debug_struct("Call");
dbg.field("service", &self.service);
dbg.field("method", &self.method);
        if !self.required_feature_flags.is_empty() {
dbg.field("required_feature_flags", &self.required_feature_flags);
}
dbg.field("deadline", &self.deadline);
dbg.finish()
}
}
impl<Req, Resp> Clone for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn clone(&self) -> Call<Req, Resp> {
Call {
service: self.service,
method: self.method,
required_feature_flags: self.required_feature_flags,
deadline: self.deadline,
request: self.request.clone(),
_marker: PhantomData::default(),
}
}
} | random_line_split |
|
call.rs | use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Instant;
use prost::Message;
/// A remote procedure call.
///
/// `Call` describes a remote procedure call: the remote service, the method, the required feature
/// flags, the deadline, the request, and the response type. `Call` instances are dispatched to a
/// remote server using `Proxy::send`, which returns the response future.
pub struct Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
pub(crate) service: &'static str,
pub(crate) method: &'static str,
pub(crate) required_feature_flags: &'static [u32],
pub(crate) deadline: Instant,
pub request: Arc<Req>,
_marker: PhantomData<Resp>,
}
impl<Req, Resp> Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
/// Creates a new `Call` instance.
pub fn new(
service: &'static str,
method: &'static str,
request: Arc<Req>,
deadline: Instant,
) -> Call<Req, Resp> {
Call {
service,
method,
required_feature_flags: &[],
deadline,
request,
_marker: PhantomData::default(),
}
}
/// Returns the call's remote service.
pub fn service(&self) -> &'static str {
self.service
}
/// Returns the call's remote method.
pub fn method(&self) -> &'static str {
self.method
}
/// Returns the call's deadline.
pub fn deadline(&self) -> Instant {
self.deadline
}
/// Retrieves the required feature flags of the call.
pub fn required_feature_flags(&self) -> &'static [u32] {
self.required_feature_flags
}
pub fn set_deadline(&mut self, deadline: Instant) -> &mut Call<Req, Resp> {
self.deadline = deadline;
self
}
/// Sets the required feature flags of the call.
///
/// If not set, no feature flags are sent with the call.
pub fn set_required_feature_flags(
&mut self,
required_feature_flags: &'static [u32],
) -> &mut Call<Req, Resp> {
self.required_feature_flags = required_feature_flags;
self
}
}
impl<Req, Resp> fmt::Debug for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut dbg = f.debug_struct("Call");
dbg.field("service", &self.service);
dbg.field("method", &self.method);
        if !self.required_feature_flags.is_empty() |
dbg.field("deadline", &self.deadline);
dbg.finish()
}
}
impl<Req, Resp> Clone for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn clone(&self) -> Call<Req, Resp> {
Call {
service: self.service,
method: self.method,
required_feature_flags: self.required_feature_flags,
deadline: self.deadline,
request: self.request.clone(),
_marker: PhantomData::default(),
}
}
}
| {
dbg.field("required_feature_flags", &self.required_feature_flags);
} | conditional_block |
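A rough end-to-end sketch of the API documented above. `EchoRequest`, `EchoResponse` and the `proxy` value are placeholders invented for illustration, and `Proxy::send` appears only because the doc comment names it; its exact signature is not shown in this file:

// Sketch only: the message types and `proxy` are assumptions.
use std::sync::Arc;
use std::time::{Duration, Instant};
let request = Arc::new(EchoRequest { message: "hello".into() });
let deadline = Instant::now() + Duration::from_secs(10);
let call: Call<EchoRequest, EchoResponse> = Call::new("Echo", "echo", request, deadline);
// Dispatch as described in the doc comment; the returned value is the response future.
// let response = proxy.send(call);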
call.rs | use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Instant;
use prost::Message;
/// A remote procedure call.
///
/// `Call` describes a remote procedure call: the remote service, the method, the required feature
/// flags, the deadline, the request, and the response type. `Call` instances are dispatched to a
/// remote server using `Proxy::send`, which returns the response future.
pub struct Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
pub(crate) service: &'static str,
pub(crate) method: &'static str,
pub(crate) required_feature_flags: &'static [u32],
pub(crate) deadline: Instant,
pub request: Arc<Req>,
_marker: PhantomData<Resp>,
}
impl<Req, Resp> Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
/// Creates a new `Call` instance.
pub fn | (
service: &'static str,
method: &'static str,
request: Arc<Req>,
deadline: Instant,
) -> Call<Req, Resp> {
Call {
service,
method,
required_feature_flags: &[],
deadline,
request,
_marker: PhantomData::default(),
}
}
/// Returns the call's remote service.
pub fn service(&self) -> &'static str {
self.service
}
/// Returns the call's remote method.
pub fn method(&self) -> &'static str {
self.method
}
/// Returns the call's deadline.
pub fn deadline(&self) -> Instant {
self.deadline
}
/// Retrieves the required feature flags of the call.
pub fn required_feature_flags(&self) -> &'static [u32] {
self.required_feature_flags
}
    /// Sets the call's deadline.
    pub fn set_deadline(&mut self, deadline: Instant) -> &mut Call<Req, Resp> {
self.deadline = deadline;
self
}
/// Sets the required feature flags of the call.
///
/// If not set, no feature flags are sent with the call.
pub fn set_required_feature_flags(
&mut self,
required_feature_flags: &'static [u32],
) -> &mut Call<Req, Resp> {
self.required_feature_flags = required_feature_flags;
self
}
}
impl<Req, Resp> fmt::Debug for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut dbg = f.debug_struct("Call");
dbg.field("service", &self.service);
dbg.field("method", &self.method);
        if !self.required_feature_flags.is_empty() {
dbg.field("required_feature_flags", &self.required_feature_flags);
}
dbg.field("deadline", &self.deadline);
dbg.finish()
}
}
impl<Req, Resp> Clone for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn clone(&self) -> Call<Req, Resp> {
Call {
service: self.service,
method: self.method,
required_feature_flags: self.required_feature_flags,
deadline: self.deadline,
request: self.request.clone(),
_marker: PhantomData::default(),
}
}
}
| new | identifier_name |
call.rs | use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Instant;
use prost::Message;
/// A remote procedure call.
///
/// `Call` describes a remote procedure call: the remote service, the method, the required feature
/// flags, the deadline, the request, and the response type. `Call` instances are dispatched to a
/// remote server using `Proxy::send`, which returns the response future.
pub struct Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
pub(crate) service: &'static str,
pub(crate) method: &'static str,
pub(crate) required_feature_flags: &'static [u32],
pub(crate) deadline: Instant,
pub request: Arc<Req>,
_marker: PhantomData<Resp>,
}
impl<Req, Resp> Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
/// Creates a new `Call` instance.
pub fn new(
service: &'static str,
method: &'static str,
request: Arc<Req>,
deadline: Instant,
) -> Call<Req, Resp> {
Call {
service,
method,
required_feature_flags: &[],
deadline,
request,
_marker: PhantomData::default(),
}
}
/// Returns the call's remote service.
pub fn service(&self) -> &'static str {
self.service
}
/// Returns the call's remote method.
pub fn method(&self) -> &'static str |
/// Returns the call's deadline.
pub fn deadline(&self) -> Instant {
self.deadline
}
/// Retrieves the required feature flags of the call.
pub fn required_feature_flags(&self) -> &'static [u32] {
self.required_feature_flags
}
    /// Sets the call's deadline.
    pub fn set_deadline(&mut self, deadline: Instant) -> &mut Call<Req, Resp> {
self.deadline = deadline;
self
}
/// Sets the required feature flags of the call.
///
/// If not set, no feature flags are sent with the call.
pub fn set_required_feature_flags(
&mut self,
required_feature_flags: &'static [u32],
) -> &mut Call<Req, Resp> {
self.required_feature_flags = required_feature_flags;
self
}
}
impl<Req, Resp> fmt::Debug for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut dbg = f.debug_struct("Call");
dbg.field("service", &self.service);
dbg.field("method", &self.method);
        if !self.required_feature_flags.is_empty() {
dbg.field("required_feature_flags", &self.required_feature_flags);
}
dbg.field("deadline", &self.deadline);
dbg.finish()
}
}
impl<Req, Resp> Clone for Call<Req, Resp>
where
    Req: Message + 'static,
Resp: Message + Default,
{
fn clone(&self) -> Call<Req, Resp> {
Call {
service: self.service,
method: self.method,
required_feature_flags: self.required_feature_flags,
deadline: self.deadline,
request: self.request.clone(),
_marker: PhantomData::default(),
}
}
}
| {
self.method
} | identifier_body |
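Because the `Debug` impl above registers `required_feature_flags` only when the slice is non-empty, default calls print a compact record. Roughly, with invented field values:

// format!("{:?}", call) with no flags set:
//   Call { service: "Echo", method: "echo", deadline: Instant { .. } }
// and with flags set:
//   Call { service: "Echo", method: "echo", required_feature_flags: [1, 7], deadline: Instant { .. } }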
sujiko.rs | //! Sujiko.
//!
//! https://en.wikipedia.org/wiki/Sujiko
//! https://www.simetric.co.uk/sujiko/index.htm
extern crate puzzle_solver;
use puzzle_solver::{Puzzle,Solution,Val,VarToken};
const SIZE: usize = 3;
type Board = [[Val; SIZE]; SIZE];
fn make_sujiko(board: &Board, tl: Val, tr: Val, bl: Val, br: Val)
-> (Puzzle, Vec<Vec<VarToken>>) {
let mut sys = Puzzle::new();
let vars = sys.new_vars_with_candidates_2d(3, 3, &[1,2,3,4,5,6,7,8,9]);
sys.all_different(vars.iter().flat_map(|it| it));
sys.equals(tl, vars[0][0] + vars[0][1] + vars[1][0] + vars[1][1]);
sys.equals(tr, vars[0][1] + vars[0][2] + vars[1][1] + vars[1][2]);
sys.equals(bl, vars[1][0] + vars[1][1] + vars[2][0] + vars[2][1]);
sys.equals(br, vars[1][1] + vars[1][2] + vars[2][1] + vars[2][2]);
sys.equals(tl + tr + bl + br - (1..(9 + 1)).sum::<Val>(),
vars[0][1] + vars[1][0] + 3 * vars[1][1] + vars[1][2] + vars[2][1]);
for y in 0..SIZE {
for x in 0..SIZE {
let value = board[y][x];
            if value != 0 |
}
}
(sys, vars)
}
fn print_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>) {
for y in 0..SIZE {
for x in 0..SIZE {
print!(" {}", dict[vars[y][x]]);
}
println!();
}
}
fn verify_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {
for y in 0..SIZE {
for x in 0..SIZE {
assert_eq!(dict[vars[y][x]], expected[y][x]);
}
}
}
#[test]
fn sujiko_simetric() {
let puzzle = [ [ 6,0,9 ], [ 0,0,0 ], [ 5,0,0 ] ];
let expected = [ [ 6,2,9 ], [ 8,1,3 ], [ 5,4,7 ] ];
let (mut sys, vars) = make_sujiko(&puzzle, 17, 15, 18, 15);
let dict = sys.solve_unique().expect("solution");
print_sujiko(&dict, &vars);
verify_sujiko(&dict, &vars, &expected);
println!("sujiko_simetric: {} guesses", sys.num_guesses());
}
| {
sys.set_value(vars[y][x], value);
} | conditional_block |
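The final `sys.equals` above encodes a redundant but pruning-friendly identity. Each 2x2 quadrant sum contains the centre cell, two edge cells and one corner, so adding the four clue sums counts every corner once, every edge cell twice and the centre four times:

    tl + tr + bl + br = (sum of all nine cells) + (vars[0][1] + vars[1][0] + vars[1][2] + vars[2][1]) + 3 * vars[1][1]

The nine cells are exactly 1 through 9, so their sum is 45, which is the `(1..(9 + 1)).sum::<Val>()` term subtracted on the left-hand side of that constraint.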
sujiko.rs | //! Sujiko.
//!
//! https://en.wikipedia.org/wiki/Sujiko
//! https://www.simetric.co.uk/sujiko/index.htm
extern crate puzzle_solver;
use puzzle_solver::{Puzzle,Solution,Val,VarToken};
const SIZE: usize = 3;
type Board = [[Val; SIZE]; SIZE];
fn make_sujiko(board: &Board, tl: Val, tr: Val, bl: Val, br: Val)
-> (Puzzle, Vec<Vec<VarToken>>) {
let mut sys = Puzzle::new();
let vars = sys.new_vars_with_candidates_2d(3, 3, &[1,2,3,4,5,6,7,8,9]);
sys.all_different(vars.iter().flat_map(|it| it));
sys.equals(tl, vars[0][0] + vars[0][1] + vars[1][0] + vars[1][1]);
sys.equals(tr, vars[0][1] + vars[0][2] + vars[1][1] + vars[1][2]);
sys.equals(bl, vars[1][0] + vars[1][1] + vars[2][0] + vars[2][1]);
sys.equals(br, vars[1][1] + vars[1][2] + vars[2][1] + vars[2][2]);
sys.equals(tl + tr + bl + br - (1..(9 + 1)).sum::<Val>(),
vars[0][1] + vars[1][0] + 3 * vars[1][1] + vars[1][2] + vars[2][1]);
for y in 0..SIZE {
for x in 0..SIZE {
let value = board[y][x];
            if value != 0 {
sys.set_value(vars[y][x], value);
}
}
}
(sys, vars)
}
fn print_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>) |
fn verify_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {
for y in 0..SIZE {
for x in 0..SIZE {
assert_eq!(dict[vars[y][x]], expected[y][x]);
}
}
}
#[test]
fn sujiko_simetric() {
let puzzle = [ [ 6,0,9 ], [ 0,0,0 ], [ 5,0,0 ] ];
let expected = [ [ 6,2,9 ], [ 8,1,3 ], [ 5,4,7 ] ];
let (mut sys, vars) = make_sujiko(&puzzle, 17, 15, 18, 15);
let dict = sys.solve_unique().expect("solution");
print_sujiko(&dict, &vars);
verify_sujiko(&dict, &vars, &expected);
println!("sujiko_simetric: {} guesses", sys.num_guesses());
}
| {
for y in 0..SIZE {
for x in 0..SIZE {
print!(" {}", dict[vars[y][x]]);
}
println!();
}
} | identifier_body |
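As a concrete check against the `sujiko_simetric` fixture above, the expected grid 6 2 9 / 8 1 3 / 5 4 7 reproduces the clue sums: tl = 6+2+8+1 = 17, tr = 2+9+1+3 = 15, bl = 8+1+5+4 = 18 and br = 1+3+4+7 = 15. The redundant constraint holds as well: 17+15+18+15 - 45 = 20 = (2+8+3+4) + 3*1.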
sujiko.rs | //! Sujiko.
//!
//! https://en.wikipedia.org/wiki/Sujiko
//! https://www.simetric.co.uk/sujiko/index.htm
extern crate puzzle_solver;
use puzzle_solver::{Puzzle,Solution,Val,VarToken};
const SIZE: usize = 3;
type Board = [[Val; SIZE]; SIZE];
fn make_sujiko(board: &Board, tl: Val, tr: Val, bl: Val, br: Val)
-> (Puzzle, Vec<Vec<VarToken>>) {
let mut sys = Puzzle::new();
let vars = sys.new_vars_with_candidates_2d(3, 3, &[1,2,3,4,5,6,7,8,9]);
sys.all_different(vars.iter().flat_map(|it| it));
sys.equals(tl, vars[0][0] + vars[0][1] + vars[1][0] + vars[1][1]);
sys.equals(tr, vars[0][1] + vars[0][2] + vars[1][1] + vars[1][2]);
sys.equals(bl, vars[1][0] + vars[1][1] + vars[2][0] + vars[2][1]);
sys.equals(br, vars[1][1] + vars[1][2] + vars[2][1] + vars[2][2]);
sys.equals(tl + tr + bl + br - (1..(9 + 1)).sum::<Val>(),
vars[0][1] + vars[1][0] + 3 * vars[1][1] + vars[1][2] + vars[2][1]);
for y in 0..SIZE {
for x in 0..SIZE {
let value = board[y][x];
            if value != 0 {
sys.set_value(vars[y][x], value);
}
}
}
(sys, vars)
}
fn | (dict: &Solution, vars: &Vec<Vec<VarToken>>) {
for y in 0..SIZE {
for x in 0..SIZE {
print!(" {}", dict[vars[y][x]]);
}
println!();
}
}
fn verify_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {
for y in 0..SIZE {
for x in 0..SIZE {
assert_eq!(dict[vars[y][x]], expected[y][x]);
}
}
}
#[test]
fn sujiko_simetric() {
let puzzle = [ [ 6,0,9 ], [ 0,0,0 ], [ 5,0,0 ] ];
let expected = [ [ 6,2,9 ], [ 8,1,3 ], [ 5,4,7 ] ];
let (mut sys, vars) = make_sujiko(&puzzle, 17, 15, 18, 15);
let dict = sys.solve_unique().expect("solution");
print_sujiko(&dict, &vars);
verify_sujiko(&dict, &vars, &expected);
println!("sujiko_simetric: {} guesses", sys.num_guesses());
}
| print_sujiko | identifier_name |
sujiko.rs | //! Sujiko.
//!
//! https://en.wikipedia.org/wiki/Sujiko
//! https://www.simetric.co.uk/sujiko/index.htm
extern crate puzzle_solver;
use puzzle_solver::{Puzzle,Solution,Val,VarToken};
const SIZE: usize = 3;
type Board = [[Val; SIZE]; SIZE];
fn make_sujiko(board: &Board, tl: Val, tr: Val, bl: Val, br: Val)
-> (Puzzle, Vec<Vec<VarToken>>) {
let mut sys = Puzzle::new();
let vars = sys.new_vars_with_candidates_2d(3, 3, &[1,2,3,4,5,6,7,8,9]);
sys.all_different(vars.iter().flat_map(|it| it));
sys.equals(tl, vars[0][0] + vars[0][1] + vars[1][0] + vars[1][1]);
sys.equals(tr, vars[0][1] + vars[0][2] + vars[1][1] + vars[1][2]);
sys.equals(bl, vars[1][0] + vars[1][1] + vars[2][0] + vars[2][1]);
sys.equals(br, vars[1][1] + vars[1][2] + vars[2][1] + vars[2][2]);
sys.equals(tl + tr + bl + br - (1..(9 + 1)).sum::<Val>(),
vars[0][1] + vars[1][0] + 3 * vars[1][1] + vars[1][2] + vars[2][1]);
for y in 0..SIZE {
for x in 0..SIZE {
let value = board[y][x];
            if value != 0 {
sys.set_value(vars[y][x], value);
}
}
}
| for y in 0..SIZE {
for x in 0..SIZE {
print!(" {}", dict[vars[y][x]]);
}
println!();
}
}
fn verify_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {
for y in 0..SIZE {
for x in 0..SIZE {
assert_eq!(dict[vars[y][x]], expected[y][x]);
}
}
}
#[test]
fn sujiko_simetric() {
let puzzle = [ [ 6,0,9 ], [ 0,0,0 ], [ 5,0,0 ] ];
let expected = [ [ 6,2,9 ], [ 8,1,3 ], [ 5,4,7 ] ];
let (mut sys, vars) = make_sujiko(&puzzle, 17, 15, 18, 15);
let dict = sys.solve_unique().expect("solution");
print_sujiko(&dict, &vars);
verify_sujiko(&dict, &vars, &expected);
println!("sujiko_simetric: {} guesses", sys.num_guesses());
} | (sys, vars)
}
fn print_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>) { | random_line_split |
urlsearchparams.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding::URLSearchParamsMethods;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams::{eURLSearchParams, eString};
use dom::bindings::error::{Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use util::str::DOMString;
use encoding::all::UTF_8;
use encoding::types::{EncodingRef, EncoderTrap};
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::fmt::radix;
use std::ascii::OwnedAsciiExt;
#[dom_struct]
pub struct URLSearchParams {
reflector_: Reflector,
data: DOMRefCell<HashMap<DOMString, Vec<DOMString>>>,
}
impl URLSearchParams {
fn new_inherited() -> URLSearchParams {
URLSearchParams {
reflector_: Reflector::new(),
data: DOMRefCell::new(HashMap::new()),
}
}
pub fn new(global: GlobalRef) -> Temporary<URLSearchParams> {
reflect_dom_object(box URLSearchParams::new_inherited(), global, URLSearchParamsBinding::Wrap)
}
pub fn | (global: GlobalRef, init: Option<StringOrURLSearchParams>) -> Fallible<Temporary<URLSearchParams>> {
let usp = URLSearchParams::new(global).root();
match init {
Some(eString(_s)) => {
// XXXManishearth we need to parse the input here
// http://url.spec.whatwg.org/#concept-urlencoded-parser
// We can use rust-url's implementation here:
// https://github.com/SimonSapin/rust-url/blob/master/form_urlencoded.rs#L29
},
Some(eURLSearchParams(u)) => {
let u = u.root();
let usp = usp.r();
let mut map = usp.data.borrow_mut();
*map = u.r().data.borrow().clone();
},
None => {}
}
Ok(Temporary::from_rooted(usp.r()))
}
}
impl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {
fn Append(self, name: DOMString, value: DOMString) {
let mut data = self.data.borrow_mut();
match data.entry(name) {
Occupied(entry) => entry.into_mut().push(value),
Vacant(entry) => {
entry.insert(vec!(value));
}
}
self.update_steps();
}
fn Delete(self, name: DOMString) {
self.data.borrow_mut().remove(&name);
self.update_steps();
}
fn Get(self, name: DOMString) -> Option<DOMString> {
self.data.borrow().get(&name).map(|v| v[0].clone())
}
fn Has(self, name: DOMString) -> bool {
self.data.borrow().contains_key(&name)
}
fn Set(self, name: DOMString, value: DOMString) {
self.data.borrow_mut().insert(name, vec!(value));
self.update_steps();
}
}
pub trait URLSearchParamsHelpers {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8>;
fn update_steps(&self);
}
impl URLSearchParamsHelpers for URLSearchParams {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-serializer
fn serialize_string(value: &DOMString, encoding: EncodingRef) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-byte-serializer
let value = value.as_slice();
// XXXManishearth should this be a strict encoding? Can unwrap()ing the result fail?
let value = encoding.encode(value, EncoderTrap::Replace).unwrap();
let mut buf = vec!();
for i in value.iter() {
let append = match *i {
0x20 => vec!(0x2B),
0x2A | 0x2D | 0x2E |
0x30... 0x39 | 0x41... 0x5A |
0x5F | 0x61...0x7A => vec!(*i),
a => {
// http://url.spec.whatwg.org/#percent-encode
let mut encoded = vec!(0x25); // %
let s = format!("{}", radix(a, 16)).into_ascii_uppercase();
let bytes = s.as_bytes();
encoded.push_all(bytes);
encoded
}
};
buf.push_all(append.as_slice());
}
buf
}
let encoding = encoding.unwrap_or(UTF_8 as EncodingRef);
let mut buf = vec!();
let mut first_pair = true;
for (k, v) in self.data.borrow().iter() {
let name = serialize_string(k, encoding);
for val in v.iter() {
let value = serialize_string(val, encoding);
if first_pair {
first_pair = false;
} else {
buf.push(0x26); // &
}
buf.push_all(name.as_slice());
buf.push(0x3D); // =
buf.push_all(value.as_slice())
}
}
buf
}
fn update_steps(&self) {
// XXXManishearth Implement this when the URL interface is implemented
// http://url.spec.whatwg.org/#concept-uq-update
}
}
| Constructor | identifier_name |
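The nested `serialize_string` above implements the urlencoded byte serializer: a space maps to '+', bytes in `*`, `-`, `.`, `_`, `0-9`, `A-Z`, `a-z` pass through, anything else is percent-encoded, and name/value pairs are then joined with '=' and '&'. A self-contained sketch of the same idea in current Rust, without the Servo DOM or encoding types; the function names are invented here, and the percent escapes are zero-padded to two hex digits per the URL spec:

fn form_urlencode_byte(byte: u8, out: &mut Vec<u8>) {
    match byte {
        b' ' => out.push(b'+'),
        b'*' | b'-' | b'.' | b'_' | b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z' => out.push(byte),
        other => out.extend_from_slice(format!("%{:02X}", other).as_bytes()),
    }
}

fn form_urlencode_pairs(pairs: &[(&str, &str)]) -> Vec<u8> {
    let mut buf = Vec::new();
    for (i, (name, value)) in pairs.iter().enumerate() {
        if i > 0 {
            buf.push(b'&'); // pair separator
        }
        name.bytes().for_each(|b| form_urlencode_byte(b, &mut buf));
        buf.push(b'='); // name/value separator
        value.bytes().for_each(|b| form_urlencode_byte(b, &mut buf));
    }
    buf
}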
urlsearchparams.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding::URLSearchParamsMethods;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams::{eURLSearchParams, eString};
use dom::bindings::error::{Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use util::str::DOMString;
use encoding::all::UTF_8;
use encoding::types::{EncodingRef, EncoderTrap};
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::fmt::radix;
use std::ascii::OwnedAsciiExt;
#[dom_struct]
pub struct URLSearchParams {
reflector_: Reflector,
data: DOMRefCell<HashMap<DOMString, Vec<DOMString>>>,
}
impl URLSearchParams {
fn new_inherited() -> URLSearchParams {
URLSearchParams {
reflector_: Reflector::new(),
data: DOMRefCell::new(HashMap::new()),
}
}
pub fn new(global: GlobalRef) -> Temporary<URLSearchParams> {
reflect_dom_object(box URLSearchParams::new_inherited(), global, URLSearchParamsBinding::Wrap)
} | Some(eString(_s)) => {
// XXXManishearth we need to parse the input here
// http://url.spec.whatwg.org/#concept-urlencoded-parser
// We can use rust-url's implementation here:
// https://github.com/SimonSapin/rust-url/blob/master/form_urlencoded.rs#L29
},
Some(eURLSearchParams(u)) => {
let u = u.root();
let usp = usp.r();
let mut map = usp.data.borrow_mut();
*map = u.r().data.borrow().clone();
},
None => {}
}
Ok(Temporary::from_rooted(usp.r()))
}
}
impl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {
fn Append(self, name: DOMString, value: DOMString) {
let mut data = self.data.borrow_mut();
match data.entry(name) {
Occupied(entry) => entry.into_mut().push(value),
Vacant(entry) => {
entry.insert(vec!(value));
}
}
self.update_steps();
}
fn Delete(self, name: DOMString) {
self.data.borrow_mut().remove(&name);
self.update_steps();
}
fn Get(self, name: DOMString) -> Option<DOMString> {
self.data.borrow().get(&name).map(|v| v[0].clone())
}
fn Has(self, name: DOMString) -> bool {
self.data.borrow().contains_key(&name)
}
fn Set(self, name: DOMString, value: DOMString) {
self.data.borrow_mut().insert(name, vec!(value));
self.update_steps();
}
}
pub trait URLSearchParamsHelpers {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8>;
fn update_steps(&self);
}
impl URLSearchParamsHelpers for URLSearchParams {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-serializer
fn serialize_string(value: &DOMString, encoding: EncodingRef) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-byte-serializer
let value = value.as_slice();
// XXXManishearth should this be a strict encoding? Can unwrap()ing the result fail?
let value = encoding.encode(value, EncoderTrap::Replace).unwrap();
let mut buf = vec!();
for i in value.iter() {
let append = match *i {
0x20 => vec!(0x2B),
0x2A | 0x2D | 0x2E |
0x30... 0x39 | 0x41... 0x5A |
0x5F | 0x61...0x7A => vec!(*i),
a => {
// http://url.spec.whatwg.org/#percent-encode
let mut encoded = vec!(0x25); // %
let s = format!("{}", radix(a, 16)).into_ascii_uppercase();
let bytes = s.as_bytes();
encoded.push_all(bytes);
encoded
}
};
buf.push_all(append.as_slice());
}
buf
}
let encoding = encoding.unwrap_or(UTF_8 as EncodingRef);
let mut buf = vec!();
let mut first_pair = true;
for (k, v) in self.data.borrow().iter() {
let name = serialize_string(k, encoding);
for val in v.iter() {
let value = serialize_string(val, encoding);
if first_pair {
first_pair = false;
} else {
buf.push(0x26); // &
}
buf.push_all(name.as_slice());
buf.push(0x3D); // =
buf.push_all(value.as_slice())
}
}
buf
}
fn update_steps(&self) {
// XXXManishearth Implement this when the URL interface is implemented
// http://url.spec.whatwg.org/#concept-uq-update
}
} |
pub fn Constructor(global: GlobalRef, init: Option<StringOrURLSearchParams>) -> Fallible<Temporary<URLSearchParams>> {
let usp = URLSearchParams::new(global).root();
match init { | random_line_split |
urlsearchparams.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding;
use dom::bindings::codegen::Bindings::URLSearchParamsBinding::URLSearchParamsMethods;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams;
use dom::bindings::codegen::UnionTypes::StringOrURLSearchParams::{eURLSearchParams, eString};
use dom::bindings::error::{Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use util::str::DOMString;
use encoding::all::UTF_8;
use encoding::types::{EncodingRef, EncoderTrap};
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::fmt::radix;
use std::ascii::OwnedAsciiExt;
#[dom_struct]
pub struct URLSearchParams {
reflector_: Reflector,
data: DOMRefCell<HashMap<DOMString, Vec<DOMString>>>,
}
impl URLSearchParams {
fn new_inherited() -> URLSearchParams {
URLSearchParams {
reflector_: Reflector::new(),
data: DOMRefCell::new(HashMap::new()),
}
}
pub fn new(global: GlobalRef) -> Temporary<URLSearchParams> {
reflect_dom_object(box URLSearchParams::new_inherited(), global, URLSearchParamsBinding::Wrap)
}
pub fn Constructor(global: GlobalRef, init: Option<StringOrURLSearchParams>) -> Fallible<Temporary<URLSearchParams>> {
let usp = URLSearchParams::new(global).root();
match init {
Some(eString(_s)) => {
// XXXManishearth we need to parse the input here
// http://url.spec.whatwg.org/#concept-urlencoded-parser
// We can use rust-url's implementation here:
// https://github.com/SimonSapin/rust-url/blob/master/form_urlencoded.rs#L29
},
Some(eURLSearchParams(u)) => {
let u = u.root();
let usp = usp.r();
let mut map = usp.data.borrow_mut();
*map = u.r().data.borrow().clone();
},
None => {}
}
Ok(Temporary::from_rooted(usp.r()))
}
}
impl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {
fn Append(self, name: DOMString, value: DOMString) {
let mut data = self.data.borrow_mut();
match data.entry(name) {
Occupied(entry) => entry.into_mut().push(value),
Vacant(entry) => {
entry.insert(vec!(value));
}
}
self.update_steps();
}
fn Delete(self, name: DOMString) |
fn Get(self, name: DOMString) -> Option<DOMString> {
self.data.borrow().get(&name).map(|v| v[0].clone())
}
fn Has(self, name: DOMString) -> bool {
self.data.borrow().contains_key(&name)
}
fn Set(self, name: DOMString, value: DOMString) {
self.data.borrow_mut().insert(name, vec!(value));
self.update_steps();
}
}
pub trait URLSearchParamsHelpers {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8>;
fn update_steps(&self);
}
impl URLSearchParamsHelpers for URLSearchParams {
fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-serializer
fn serialize_string(value: &DOMString, encoding: EncodingRef) -> Vec<u8> {
// http://url.spec.whatwg.org/#concept-urlencoded-byte-serializer
let value = value.as_slice();
// XXXManishearth should this be a strict encoding? Can unwrap()ing the result fail?
let value = encoding.encode(value, EncoderTrap::Replace).unwrap();
let mut buf = vec!();
for i in value.iter() {
let append = match *i {
0x20 => vec!(0x2B),
0x2A | 0x2D | 0x2E |
0x30... 0x39 | 0x41... 0x5A |
0x5F | 0x61...0x7A => vec!(*i),
a => {
// http://url.spec.whatwg.org/#percent-encode
let mut encoded = vec!(0x25); // %
let s = format!("{}", radix(a, 16)).into_ascii_uppercase();
let bytes = s.as_bytes();
encoded.push_all(bytes);
encoded
}
};
buf.push_all(append.as_slice());
}
buf
}
let encoding = encoding.unwrap_or(UTF_8 as EncodingRef);
let mut buf = vec!();
let mut first_pair = true;
for (k, v) in self.data.borrow().iter() {
let name = serialize_string(k, encoding);
for val in v.iter() {
let value = serialize_string(val, encoding);
if first_pair {
first_pair = false;
} else {
buf.push(0x26); // &
}
buf.push_all(name.as_slice());
buf.push(0x3D); // =
buf.push_all(value.as_slice())
}
}
buf
}
fn update_steps(&self) {
// XXXManishearth Implement this when the URL interface is implemented
// http://url.spec.whatwg.org/#concept-uq-update
}
}
| {
self.data.borrow_mut().remove(&name);
self.update_steps();
} | identifier_body |
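The `Constructor` above still leaves the string case as a TODO and points at rust-url's form_urlencoded parser. A rough sketch of how that branch could populate the same name-to-values multimap shape, assuming the standalone `form_urlencoded` crate (or `url::form_urlencoded`) is available; wiring the result back into the Servo DOM types is not shown:

use std::collections::HashMap;

// Sketch: parse "a=1&b=2&a=3" into the multimap shape used by URLSearchParams.
fn parse_query(input: &str) -> HashMap<String, Vec<String>> {
    let mut data: HashMap<String, Vec<String>> = HashMap::new();
    for (name, value) in form_urlencoded::parse(input.as_bytes()).into_owned() {
        data.entry(name).or_insert_with(Vec::new).push(value);
    }
    data
}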
ambig_impl_2_exe.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:ambig_impl_2_lib.rs
extern crate ambig_impl_2_lib;
use ambig_impl_2_lib::me;
trait me2 {
fn me(&self) -> uint;
}
impl me2 for uint { fn me(&self) -> uint | } //~ NOTE is `uint.me2::me`
fn main() { 1u.me(); } //~ ERROR multiple applicable methods in scope
//~^ NOTE is `ambig_impl_2_lib::uint.me::me`
| { *self } | identifier_body |
ambig_impl_2_exe.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:ambig_impl_2_lib.rs
extern crate ambig_impl_2_lib;
use ambig_impl_2_lib::me; | }
impl me2 for uint { fn me(&self) -> uint { *self } } //~ NOTE is `uint.me2::me`
fn main() { 1u.me(); } //~ ERROR multiple applicable methods in scope
//~^ NOTE is `ambig_impl_2_lib::uint.me::me` | trait me2 {
fn me(&self) -> uint; | random_line_split |
ambig_impl_2_exe.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:ambig_impl_2_lib.rs
extern crate ambig_impl_2_lib;
use ambig_impl_2_lib::me;
trait me2 {
fn me(&self) -> uint;
}
impl me2 for uint { fn me(&self) -> uint { *self } } //~ NOTE is `uint.me2::me`
fn | () { 1u.me(); } //~ ERROR multiple applicable methods in scope
//~^ NOTE is `ambig_impl_2_lib::uint.me::me`
| main | identifier_name |
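The test above exercises the "multiple applicable methods in scope" error: the locally defined `me2` impl and the `me` trait imported from `ambig_impl_2_lib` both supply a `me` method for `uint`, so `1u.me()` cannot be resolved. Outside a compile-fail test the usual fix is fully qualified syntax; a small sketch in modern Rust terms, with trait names invented to mirror the setup above:

// Hypothetical disambiguation sketch: name the trait at the call site
// instead of relying on method-call syntax.
trait MeA { fn me(&self) -> usize; }
trait MeB { fn me(&self) -> usize; }
impl MeA for usize { fn me(&self) -> usize { *self } }
impl MeB for usize { fn me(&self) -> usize { *self + 1 } }

fn main() {
    // `1usize.me()` would be ambiguous here; qualify the trait instead:
    let a = MeA::me(&1usize);
    let b = <usize as MeB>::me(&1usize);
    assert_eq!((a, b), (1, 2));
}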
if-let.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(if_let)]
pub fn main() {
let x = Some(3i);
if let Some(y) = x {
assert_eq!(y, 3i);
} else {
panic!("if-let panicked");
}
let mut worked = false;
if let Some(_) = x {
worked = true;
}
assert!(worked);
let clause: uint;
if let None = Some("test") {
clause = 1;
} else if 4u > 5 {
clause = 2;
} else if let Ok(()) = Err::<(),&'static str>("test") {
clause = 3; |
if 3i > 4 {
panic!("bad math");
} else if let 1 = 2i {
panic!("bad pattern match");
}
enum Foo {
One,
Two(uint),
Three(String, int)
}
let foo = Foo::Three("three".to_string(), 42i);
if let Foo::One = foo {
panic!("bad pattern match");
} else if let Foo::Two(_x) = foo {
panic!("bad pattern match");
} else if let Foo::Three(s, _) = foo {
assert_eq!(s.as_slice(), "three");
} else {
panic!("bad else");
}
if false {
panic!("wat");
} else if let a@Foo::Two(_) = Foo::Two(42u) {
if let Foo::Two(b) = a {
assert_eq!(b, 42u);
} else {
panic!("panic in nested if-let");
}
}
} | } else {
clause = 4;
}
assert_eq!(clause, 4u); | random_line_split |
if-let.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(if_let)]
pub fn | () {
let x = Some(3i);
if let Some(y) = x {
assert_eq!(y, 3i);
} else {
panic!("if-let panicked");
}
let mut worked = false;
if let Some(_) = x {
worked = true;
}
assert!(worked);
let clause: uint;
if let None = Some("test") {
clause = 1;
} else if 4u > 5 {
clause = 2;
} else if let Ok(()) = Err::<(),&'static str>("test") {
clause = 3;
} else {
clause = 4;
}
assert_eq!(clause, 4u);
if 3i > 4 {
panic!("bad math");
} else if let 1 = 2i {
panic!("bad pattern match");
}
enum Foo {
One,
Two(uint),
Three(String, int)
}
let foo = Foo::Three("three".to_string(), 42i);
if let Foo::One = foo {
panic!("bad pattern match");
} else if let Foo::Two(_x) = foo {
panic!("bad pattern match");
} else if let Foo::Three(s, _) = foo {
assert_eq!(s.as_slice(), "three");
} else {
panic!("bad else");
}
if false {
panic!("wat");
} else if let a@Foo::Two(_) = Foo::Two(42u) {
if let Foo::Two(b) = a {
assert_eq!(b, 42u);
} else {
panic!("panic in nested if-let");
}
}
}
| main | identifier_name |
label.rs | use sfml::graphics::{RenderTarget, Text, TextStyle, Color};
use sfml::system::vector2::Vector2f;
use window::Window;
use font::Font;
/// A label (text)
pub struct Label<'a> {
text: Text<'a>,
}
impl<'a> Label<'a> {
/// Create a new label
pub fn new(font: &'a Font) -> Self {
let mut label = Label {
text: Text::new().unwrap(),
};
label.text.set_font(font.to_sfml_font());
label
}
/// Set the text
pub fn text(&mut self, s: &str) -> &mut Self {
self.text.set_string(s);
self
}
/// Set the font size
pub fn size(&mut self, s: u32) -> &mut Self {
self.text.set_character_size(s);
self
}
/// Set the x coordinate of the label
pub fn x(&mut self, x: f32) -> &mut Self {
let y = self.text.get_position().y;
self.text.set_position(&Vector2f::new(x, y));
self
}
/// Set the y coordinate of the label
pub fn y(&mut self, y: f32) -> &mut Self {
let x = self.text.get_position().x;
self.text.set_position(&Vector2f::new(x, y));
self
}
/// Set the color of the label
pub fn color(&mut self, r: u8, g: u8, b: u8) -> &mut Self {
self.text.set_color(&Color::new_rgb(r, g, b));
self
}
/// Set the opacity of the label
pub fn alpha(&mut self, a: u8) -> &mut Self {
let mut color = self.text.get_color();
color.alpha = a;
self.text.set_color(&color);
self
}
/// Make the label bold
pub fn bold(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Bold);
self
}
/// Make the label italic
pub fn italic(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Italic);
self
}
/// Make the label underlined
pub fn underlined(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Underlined);
self
}
/// Move the label relative to the current position
pub fn go(&mut self, x: f32, y: f32) -> &mut Self {
let (px, py) = self.pos();
self.set((px + x, py + y));
self
}
/// Rotate the label where a is given in degrees
pub fn rotate(&mut self, a: f32) -> &mut Self {
self.text.rotate(a);
self
}
/// Step in the current direction
pub fn step(&mut self, size: f32) -> &mut Self {
let angle = self.text.get_rotation().to_radians();
self.go(angle.cos() * size, angle.sin() * size);
self
}
/// Get the length to a given point
pub fn length_to(&self, (x, y): (f32, f32)) -> f32 {
let pos = self.text.get_position();
let dx = x - pos.x;
let dy = y - pos.y;
(dx * dx + dy * dy).sqrt()
}
/// Get the position of the label
pub fn pos(&self) -> (f32, f32) {
let pos = self.text.get_position();
(pos.x, pos.y)
}
/// Set the position of the label
pub fn set(&mut self, (x, y): (f32, f32)) -> &mut Self {
self.x(x);
self.y(y)
} | }
} |
/// Draw the label on a window
pub fn draw(&mut self, window: &mut Window) {
window.to_sfml_window().draw(&mut self.text); | random_line_split |
label.rs | use sfml::graphics::{RenderTarget, Text, TextStyle, Color};
use sfml::system::vector2::Vector2f;
use window::Window;
use font::Font;
/// A label (text)
pub struct Label<'a> {
text: Text<'a>,
}
impl<'a> Label<'a> {
/// Create a new label
pub fn new(font: &'a Font) -> Self {
let mut label = Label {
text: Text::new().unwrap(),
};
label.text.set_font(font.to_sfml_font());
label
}
/// Set the text
pub fn text(&mut self, s: &str) -> &mut Self {
self.text.set_string(s);
self
}
/// Set the font size
pub fn size(&mut self, s: u32) -> &mut Self {
self.text.set_character_size(s);
self
}
/// Set the x coordinate of the label
pub fn x(&mut self, x: f32) -> &mut Self {
let y = self.text.get_position().y;
self.text.set_position(&Vector2f::new(x, y));
self
}
/// Set the y coordinate of the label
pub fn y(&mut self, y: f32) -> &mut Self {
let x = self.text.get_position().x;
self.text.set_position(&Vector2f::new(x, y));
self
}
/// Set the color of the label
pub fn color(&mut self, r: u8, g: u8, b: u8) -> &mut Self {
self.text.set_color(&Color::new_rgb(r, g, b));
self
}
/// Set the opacity of the label
pub fn alpha(&mut self, a: u8) -> &mut Self {
let mut color = self.text.get_color();
color.alpha = a;
self.text.set_color(&color);
self
}
/// Make the label bold
pub fn bold(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Bold);
self
}
/// Make the label italic
pub fn italic(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Italic);
self
}
/// Make the label underlined
pub fn underlined(&mut self) -> &mut Self {
self.text.set_style(TextStyle::Underlined);
self
}
/// Move the label relative to the current position
pub fn go(&mut self, x: f32, y: f32) -> &mut Self {
let (px, py) = self.pos();
self.set((px + x, py + y));
self
}
/// Rotate the label where a is given in degrees
pub fn rotate(&mut self, a: f32) -> &mut Self {
self.text.rotate(a);
self
}
/// Step in the current direction
pub fn step(&mut self, size: f32) -> &mut Self {
let angle = self.text.get_rotation().to_radians();
self.go(angle.cos() * size, angle.sin() * size);
self
}
/// Get the length to a given point
pub fn length_to(&self, (x, y): (f32, f32)) -> f32 {
let pos = self.text.get_position();
let dx = x - pos.x;
let dy = y - pos.y;
(dx * dx + dy * dy).sqrt()
}
/// Get the position of the label
pub fn | (&self) -> (f32, f32) {
let pos = self.text.get_position();
(pos.x, pos.y)
}
/// Set the position of the label
pub fn set(&mut self, (x, y): (f32, f32)) -> &mut Self {
self.x(x);
self.y(y)
}
/// Draw the label on a window
pub fn draw(&mut self, window: &mut Window) {
window.to_sfml_window().draw(&mut self.text);
}
}
| pos | identifier_name |
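Taken together, the builder-style methods above give turtle-like positioning: `rotate` sets a heading in degrees and `step` advances along it using the cosine and sine of that angle. A rough usage sketch, assuming a `font` and a mutable `window` have already been created with the rest of this crate:

// Sketch only: `font` and `window` are assumed to exist.
let mut label = Label::new(&font);
label.text("score: 0")
    .size(24)
    .color(255, 255, 255)
    .set((10.0, 10.0));
label.rotate(45.0).step(20.0); // move 20 px along the 45-degree heading
label.draw(&mut window);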
main.rs | use rooster::rclio::RegularInputOutput;
use std::env::VarError;
use std::path::PathBuf;
const ROOSTER_FILE_ENV_VAR: &'static str = "ROOSTER_FILE";
const ROOSTER_FILE_DEFAULT: &'static str = ".passwords.rooster";
fn get_password_file_path() -> Result<PathBuf, i32> {
// First, look for the ROOSTER_FILE environment variable.
match std::env::var(ROOSTER_FILE_ENV_VAR) {
Ok(filename) => Ok(PathBuf::from(filename)),
Err(VarError::NotPresent) => {
// If the environment variable is not there, we'll look in the default location:
// ~/.passwords.rooster
let mut file_default = PathBuf::from(
dirs::home_dir()
.ok_or(1)?
.as_os_str() | .to_os_string()
.into_string()
.map_err(|_| 1)?,
);
file_default.push(ROOSTER_FILE_DEFAULT);
Ok(file_default)
}
Err(VarError::NotUnicode(_)) => Err(1),
}
}
fn main() {
let args = std::env::args().collect::<Vec<String>>();
let args_refs = args.iter().map(|s| s.as_str()).collect::<Vec<&str>>();
let rooster_file_path = get_password_file_path().unwrap_or_else(|err| std::process::exit(err));
let stdin = std::io::stdin();
let stdout = std::io::stdout();
let stderr = std::io::stderr();
std::process::exit(rooster::main_with_args(
args_refs.as_slice(),
&mut RegularInputOutput::new(stdin.lock(), stdout.lock(), stderr.lock()),
&rooster_file_path,
));
} | random_line_split |
|
main.rs | use rooster::rclio::RegularInputOutput;
use std::env::VarError;
use std::path::PathBuf;
const ROOSTER_FILE_ENV_VAR: &'static str = "ROOSTER_FILE";
const ROOSTER_FILE_DEFAULT: &'static str = ".passwords.rooster";
fn | () -> Result<PathBuf, i32> {
// First, look for the ROOSTER_FILE environment variable.
match std::env::var(ROOSTER_FILE_ENV_VAR) {
Ok(filename) => Ok(PathBuf::from(filename)),
Err(VarError::NotPresent) => {
// If the environment variable is not there, we'll look in the default location:
// ~/.passwords.rooster
let mut file_default = PathBuf::from(
dirs::home_dir()
.ok_or(1)?
.as_os_str()
.to_os_string()
.into_string()
.map_err(|_| 1)?,
);
file_default.push(ROOSTER_FILE_DEFAULT);
Ok(file_default)
}
Err(VarError::NotUnicode(_)) => Err(1),
}
}
fn main() {
let args = std::env::args().collect::<Vec<String>>();
let args_refs = args.iter().map(|s| s.as_str()).collect::<Vec<&str>>();
let rooster_file_path = get_password_file_path().unwrap_or_else(|err| std::process::exit(err));
let stdin = std::io::stdin();
let stdout = std::io::stdout();
let stderr = std::io::stderr();
std::process::exit(rooster::main_with_args(
args_refs.as_slice(),
&mut RegularInputOutput::new(stdin.lock(), stdout.lock(), stderr.lock()),
&rooster_file_path,
));
}
| get_password_file_path | identifier_name |
main.rs | use rooster::rclio::RegularInputOutput;
use std::env::VarError;
use std::path::PathBuf;
const ROOSTER_FILE_ENV_VAR: &'static str = "ROOSTER_FILE";
const ROOSTER_FILE_DEFAULT: &'static str = ".passwords.rooster";
fn get_password_file_path() -> Result<PathBuf, i32> | }
fn main() {
let args = std::env::args().collect::<Vec<String>>();
let args_refs = args.iter().map(|s| s.as_str()).collect::<Vec<&str>>();
let rooster_file_path = get_password_file_path().unwrap_or_else(|err| std::process::exit(err));
let stdin = std::io::stdin();
let stdout = std::io::stdout();
let stderr = std::io::stderr();
std::process::exit(rooster::main_with_args(
args_refs.as_slice(),
&mut RegularInputOutput::new(stdin.lock(), stdout.lock(), stderr.lock()),
&rooster_file_path,
));
}
| {
// First, look for the ROOSTER_FILE environment variable.
match std::env::var(ROOSTER_FILE_ENV_VAR) {
Ok(filename) => Ok(PathBuf::from(filename)),
Err(VarError::NotPresent) => {
// If the environment variable is not there, we'll look in the default location:
// ~/.passwords.rooster
let mut file_default = PathBuf::from(
dirs::home_dir()
.ok_or(1)?
.as_os_str()
.to_os_string()
.into_string()
.map_err(|_| 1)?,
);
file_default.push(ROOSTER_FILE_DEFAULT);
Ok(file_default)
}
Err(VarError::NotUnicode(_)) => Err(1),
} | identifier_body |
private-inferred-type-2.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:private-inferred-type.rs
extern crate private_inferred_type as ext;
mod m {
struct Priv;
pub struct Pub<T>(pub T);
impl Pub<Priv> {
pub fn get_priv() -> Priv { Priv }
pub fn static_method() {} | m::Pub::static_method; //~ ERROR type `m::Priv` is private
ext::Pub::static_method; //~ ERROR type `ext::Priv` is private
} | }
}
fn main() {
m::Pub::get_priv; //~ ERROR type `m::Priv` is private | random_line_split |
private-inferred-type-2.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:private-inferred-type.rs
extern crate private_inferred_type as ext;
mod m {
struct Priv;
pub struct Pub<T>(pub T);
impl Pub<Priv> {
pub fn get_priv() -> Priv { Priv }
pub fn | () {}
}
}
fn main() {
m::Pub::get_priv; //~ ERROR type `m::Priv` is private
m::Pub::static_method; //~ ERROR type `m::Priv` is private
ext::Pub::static_method; //~ ERROR type `ext::Priv` is private
}
| static_method | identifier_name |
propane.rs | // Lumol, an extensible molecular simulation engine
// Copyright (C) 2015-2016 Lumol's contributors — BSD license
#[macro_use]
extern crate bencher;
extern crate rand;
extern crate lumol;
extern crate lumol_input;
use bencher::Bencher;
use rand::Rng;
use lumol::sys::EnergyCache;
use lumol::types::Vector3D;
mod utils;
fn energy(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.potential_energy();
})
}
fn forces(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.forces();
})
}
fn virial(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.virial();
})
}
fn cache_move_particles(bencher: &mut Bencher) {
let system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
let molecule = rng.choose(system.molecules()).unwrap();
let mut delta = vec![];
for i in molecule {
let position = system[i].position;
delta.push(position + Vector3D::new(rng.gen(), rng.gen(), rng.gen()));
}
bencher.iter(||{
cache.move_particles_cost(&system, molecule.iter().collect(), &delta)
})
}
fn ca | encher: &mut Bencher) {
let mut system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
for molecule in system.molecules().to_owned() {
let delta = Vector3D::new(rng.gen(), rng.gen(), rng.gen());
for i in molecule {
system[i].position += delta;
}
}
bencher.iter(||{
cache.move_all_rigid_molecules_cost(&system)
})
}
benchmark_group!(energy_computation, energy, forces, virial);
benchmark_group!(monte_carlo_cache, cache_move_particles, cache_move_all_rigid_molecules);
benchmark_main!(energy_computation, monte_carlo_cache);
| che_move_all_rigid_molecules(b | identifier_name |
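These benchmarks compare full recomputation (`potential_energy`, `forces`, `virial`) against the incremental `EnergyCache` costs that Monte Carlo moves rely on. Another case can be registered with the same bencher pattern; a minimal sketch, where the new function name is arbitrary:

// Sketch: an extra benchmark added alongside the existing ones.
fn cache_init(bencher: &mut Bencher) {
    let system = utils::get_system("propane");
    bencher.iter(|| {
        let mut cache = EnergyCache::new();
        cache.init(&system);
    })
}
// It would then be listed in a group, for example:
// benchmark_group!(monte_carlo_cache, cache_move_particles,
//                  cache_move_all_rigid_molecules, cache_init);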
propane.rs | // Lumol, an extensible molecular simulation engine
// Copyright (C) 2015-2016 Lumol's contributors — BSD license
#[macro_use]
extern crate bencher;
extern crate rand;
extern crate lumol;
extern crate lumol_input;
use bencher::Bencher;
use rand::Rng;
use lumol::sys::EnergyCache;
use lumol::types::Vector3D;
mod utils;
fn energy(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.potential_energy();
})
}
fn forces(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.forces();
})
}
fn virial(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.virial();
})
}
fn cache_move_particles(bencher: &mut Bencher) {
let system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
let molecule = rng.choose(system.molecules()).unwrap();
let mut delta = vec![];
for i in molecule {
let position = system[i].position;
delta.push(position + Vector3D::new(rng.gen(), rng.gen(), rng.gen()));
}
bencher.iter(||{
cache.move_particles_cost(&system, molecule.iter().collect(), &delta)
})
}
fn cache_move_all_rigid_molecules(bencher: &mut Bencher) {
|
benchmark_group!(energy_computation, energy, forces, virial);
benchmark_group!(monte_carlo_cache, cache_move_particles, cache_move_all_rigid_molecules);
benchmark_main!(energy_computation, monte_carlo_cache);
| let mut system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
for molecule in system.molecules().to_owned() {
let delta = Vector3D::new(rng.gen(), rng.gen(), rng.gen());
for i in molecule {
system[i].position += delta;
}
}
bencher.iter(||{
cache.move_all_rigid_molecules_cost(&system)
})
}
| identifier_body |
propane.rs | // Lumol, an extensible molecular simulation engine
// Copyright (C) 2015-2016 Lumol's contributors — BSD license
#[macro_use]
extern crate bencher;
extern crate rand;
extern crate lumol;
extern crate lumol_input;
use bencher::Bencher;
use rand::Rng;
use lumol::sys::EnergyCache;
use lumol::types::Vector3D;
mod utils;
fn energy(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.potential_energy();
})
} |
fn forces(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.forces();
})
}
fn virial(bencher: &mut Bencher) {
let system = utils::get_system("propane");
bencher.iter(||{
let _ = system.virial();
})
}
fn cache_move_particles(bencher: &mut Bencher) {
let system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
let molecule = rng.choose(system.molecules()).unwrap();
let mut delta = vec![];
for i in molecule {
let position = system[i].position;
delta.push(position + Vector3D::new(rng.gen(), rng.gen(), rng.gen()));
}
bencher.iter(||{
cache.move_particles_cost(&system, molecule.iter().collect(), &delta)
})
}
fn cache_move_all_rigid_molecules(bencher: &mut Bencher) {
let mut system = utils::get_system("propane");
let mut cache = EnergyCache::new();
cache.init(&system);
let mut rng = rand::weak_rng();
for molecule in system.molecules().to_owned() {
let delta = Vector3D::new(rng.gen(), rng.gen(), rng.gen());
for i in molecule {
system[i].position += delta;
}
}
bencher.iter(||{
cache.move_all_rigid_molecules_cost(&system)
})
}
benchmark_group!(energy_computation, energy, forces, virial);
benchmark_group!(monte_carlo_cache, cache_move_particles, cache_move_all_rigid_molecules);
benchmark_main!(energy_computation, monte_carlo_cache); | random_line_split |
|
main.rs | // For random generation
extern crate rand;
// For fmt::Display
use std::fmt;
// For I/O (stdin, stdout, etc)
use std::io::prelude::*;
use rand::Rng;
/// A simple struct for a board
struct Board {
/// The cells of the board
cells: Vec<bool>,
/// The size of the board
size: usize,
}
// Functions for the Board struct
impl Board {
/// Generate a new, empty board, of size >= 1
///
/// Returns a Board in the "off" state, where all cells are 0.
/// If a size of 0 is given, a Board of size 1 will be created instead.
/// A mutable board is required for using Board::fliprow and Board::flipcol functions.
///
/// ```
/// let mut board: Board = Board::new(3);
/// ```
fn new(size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size > 0 {
Board {
cells: vec![false; size * size],
size,
}
} else {
Board::new(1)
}
}
/// Flip the specified row
///
/// Returns true if the row is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.fliprow(1);
/// ```
fn fliprow(&mut self, row: usize) -> bool {
// Check constraints
if row > self.size {
return false;
}
// Starting position in the vector
let start = row * self.size;
// Loop through the vector row
for i in start..start + self.size {
            self.cells[i] = !self.cells[i];
}
true
}
/// Flip the specified column
///
/// Returns true if the column is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.flipcol(0);
/// ```
fn flipcol(&mut self, col: usize) -> bool {
// Check constraints
if col > self.size {
return false;
}
// Loop through the vector column
for i in 0..self.size {
            self.cells[col + i * self.size] = !self.cells[col + i * self.size];
}
true
}
/// Generate a random board
///
/// Returns a Board in a random state.
/// If a size of 0 is given, a Board of size 1 will be created instead.
///
/// ```
    /// let target: Board = Board::random(&mut rand::thread_rng(), 3);
/// ```
fn random<R: Rng>(rng: &mut R, size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size == 0 {
return Board::random(rng, 1);
}
// Make a vector of the board size with random bits
let cells = (0..size * size)
.map(|_| rng.gen::<bool>())
.collect::<Vec<_>>();
// Return the random board
Board { cells, size }
}
}
impl PartialEq for Board {
fn | (&self, rhs: &Board) -> bool {
self.cells == rhs.cells
}
}
// Implement the Display format, used with `print!("{}", &board);`
impl fmt::Display for Board {
// Example output:
// 0 1 2
// 0 0 1 0
// 1 1 0 0
// 2 0 1 1
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Get the string width of the size of the board
let width = (self.size - 1).to_string().len();
// Write the initial spaces (upper left)
write!(f, "{space: >0$}", width, space = " ")?;
// Write the column numbers
for i in 0..self.size {
write!(f, " {offset:>0$}", width, offset = i)?;
}
// Newline for rows
writeln!(f)?;
// Loop through the rows
for row in 0..self.size {
// Write the row number
write!(f, "{row:>0$}", width, row = row)?;
// Loop through the columns
for col in 0..self.size {
// Get the value of the cell as 1 or 0
let p = self.cells[row * self.size + col] as usize;
// Write the column value
write!(f, " {col:>0$}", width, col = p)?;
}
// Newline for next row
writeln!(f)?;
}
// Return Formatter result
Ok(())
}
}
fn main() {
let mut rng = rand::thread_rng();
// The board size
let size: usize = 3;
// The target board
let target: Board = Board::random(&mut rng, size);
// The user board
let mut board: Board = Board::new(size);
// How many moves taken
let mut moves: u32 = 0;
// Loop until win or quit
'mainloop: loop {
// User input
let mut input: String;
// Write the boards
println!("Target:\n{}\nBoard:\n{}", &target, &board);
// User input loop
'userinput: loop {
// Prompt
print!("\nFlip? [q|[r|c]#] ");
// Flush stdout to write the previous print, if we can't then exit
match std::io::stdout().flush() {
Ok(_) => {}
Err(e) => {
println!("Error: cannot flush stdout: {}", e);
break'mainloop;
}
};
// Reset input for each loop
input = String::new();
// Read user input
match std::io::stdin().read_line(&mut input) {
Ok(_) => {
input = input.trim().to_string();
// Get the first character
let rc: char = match input.chars().next() {
Some(c) => c,
None => {
println!("Error: No input");
continue 'userinput;
}
};
// Make sure input is r, c, or q
                    if rc != 'r' && rc != 'c' && rc != 'q' {
println!("Error: '{}': Must use 'r'ow or 'c'olumn or 'q'uit", rc);
continue 'userinput;
}
// If input is q, exit game
if rc == 'q' {
println!("Thanks for playing!");
break'mainloop;
}
// If input is r or c, get the number after
let n: usize = match input[1..].to_string().parse() {
Ok(x) => {
// If we're within bounds, return the parsed number
if x < size {
x
} else {
println!(
"Error: Must specify a row or column within size({})",
size
);
continue 'userinput;
}
}
Err(_) => {
println!(
"Error: '{}': Unable to parse row or column number",
input[1..].to_string()
);
continue 'userinput;
}
};
// Flip the row or column specified
match rc {
'r' => board.fliprow(n),
'c' => board.flipcol(n),
_ => {
// We want to panic here because should NEVER
// have anything other than 'r' or 'c' here
panic!("How did you end up here?");
}
};
// Increment moves
moves += 1;
println!("Moves taken: {}", moves);
break 'userinput;
}
Err(e) => {
println!("Error reading input: {}", e);
break'mainloop;
}
}
} // 'userinput
if board == target {
println!("You win!");
break;
}
} //'mainloop
}
| eq | identifier_name |
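A useful property of the game above: `fliprow` and `flipcol` are involutions and commute, so only the parity of flips per row and per column matters, and cell (r, c) ends up set exactly when row_flips[r] + col_flips[c] is odd. A solution, when one exists, therefore never needs more than one flip per row and per column. A tiny sketch of the involution part, written as if it sat in the same module so the private `cells` field is visible:

// Sketch: flipping the same row twice restores the empty board.
let mut board = Board::new(3);
board.fliprow(1);
board.fliprow(1);
assert_eq!(board.cells, vec![false; 9]);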
main.rs | // For random generation
extern crate rand;
// For fmt::Display
use std::fmt;
// For I/O (stdin, stdout, etc)
use std::io::prelude::*;
use rand::Rng;
| cells: Vec<bool>,
/// The size of the board
size: usize,
}
// Functions for the Board struct
impl Board {
/// Generate a new, empty board, of size >= 1
///
/// Returns a Board in the "off" state, where all cells are 0.
/// If a size of 0 is given, a Board of size 1 will be created instead.
/// A mutable board is required for using Board::fliprow and Board::flipcol functions.
///
/// ```
/// let mut board: Board = Board::new(3);
/// ```
fn new(size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size > 0 {
Board {
cells: vec![false; size * size],
size,
}
} else {
Board::new(1)
}
}
/// Flip the specified row
///
/// Returns true if the row is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.fliprow(1);
/// ```
fn fliprow(&mut self, row: usize) -> bool {
// Check constraints
if row > self.size {
return false;
}
// Starting position in the vector
let start = row * self.size;
// Loop through the vector row
for i in start..start + self.size {
            self.cells[i] = !self.cells[i];
}
true
}
/// Flip the specified column
///
/// Returns true if the column is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.flipcol(0);
/// ```
fn flipcol(&mut self, col: usize) -> bool {
// Check constraints
if col > self.size {
return false;
}
// Loop through the vector column
for i in 0..self.size {
            self.cells[col + i * self.size] = !self.cells[col + i * self.size];
}
true
}
/// Generate a random board
///
/// Returns a Board in a random state.
/// If a size of 0 is given, a Board of size 1 will be created instead.
///
/// ```
    /// let target: Board = Board::random(&mut rand::thread_rng(), 3);
/// ```
fn random<R: Rng>(rng: &mut R, size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size == 0 {
return Board::random(rng, 1);
}
// Make a vector of the board size with random bits
let cells = (0..size * size)
.map(|_| rng.gen::<bool>())
.collect::<Vec<_>>();
// Return the random board
Board { cells, size }
}
}
impl PartialEq for Board {
fn eq(&self, rhs: &Board) -> bool {
self.cells == rhs.cells
}
}
// Implement the Display format, used with `print!("{}", &board);`
impl fmt::Display for Board {
// Example output:
// 0 1 2
// 0 0 1 0
// 1 1 0 0
// 2 0 1 1
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Get the string width of the size of the board
let width = (self.size - 1).to_string().len();
// Write the initial spaces (upper left)
write!(f, "{space: >0$}", width, space = " ")?;
// Write the column numbers
for i in 0..self.size {
write!(f, " {offset:>0$}", width, offset = i)?;
}
// Newline for rows
writeln!(f)?;
// Loop through the rows
for row in 0..self.size {
// Write the row number
write!(f, "{row:>0$}", width, row = row)?;
// Loop through the columns
for col in 0..self.size {
// Get the value of the cell as 1 or 0
let p = self.cells[row * self.size + col] as usize;
// Write the column value
write!(f, " {col:>0$}", width, col = p)?;
}
// Newline for next row
writeln!(f)?;
}
// Return Formatter result
Ok(())
}
}
fn main() {
let mut rng = rand::thread_rng();
// The board size
let size: usize = 3;
// The target board
let target: Board = Board::random(&mut rng, size);
// The user board
let mut board: Board = Board::new(size);
// How many moves taken
let mut moves: u32 = 0;
// Loop until win or quit
'mainloop: loop {
// User input
let mut input: String;
// Write the boards
println!("Target:\n{}\nBoard:\n{}", &target, &board);
// User input loop
'userinput: loop {
// Prompt
print!("\nFlip? [q|[r|c]#] ");
// Flush stdout to write the previous print, if we can't then exit
match std::io::stdout().flush() {
Ok(_) => {}
Err(e) => {
println!("Error: cannot flush stdout: {}", e);
break 'mainloop;
}
};
// Reset input for each loop
input = String::new();
// Read user input
match std::io::stdin().read_line(&mut input) {
Ok(_) => {
input = input.trim().to_string();
// Get the first character
let rc: char = match input.chars().next() {
Some(c) => c,
None => {
println!("Error: No input");
continue 'userinput;
}
};
// Make sure input is r, c, or q
if rc != 'r' && rc != 'c' && rc != 'q' {
println!("Error: '{}': Must use 'r'ow or 'c'olumn or 'q'uit", rc);
continue 'userinput;
}
// If input is q, exit game
if rc == 'q' {
println!("Thanks for playing!");
break 'mainloop;
}
// If input is r or c, get the number after
let n: usize = match input[1..].to_string().parse() {
Ok(x) => {
// If we're within bounds, return the parsed number
if x < size {
x
} else {
println!(
"Error: Must specify a row or column within size({})",
size
);
continue 'userinput;
}
}
Err(_) => {
println!(
"Error: '{}': Unable to parse row or column number",
input[1..].to_string()
);
continue 'userinput;
}
};
// Flip the row or column specified
match rc {
'r' => board.fliprow(n),
'c' => board.flipcol(n),
_ => {
// We want to panic here because we should NEVER
// have anything other than 'r' or 'c' here
panic!("How did you end up here?");
}
};
// Increment moves
moves += 1;
println!("Moves taken: {}", moves);
break 'userinput;
}
Err(e) => {
println!("Error reading input: {}", e);
break 'mainloop;
}
}
} // 'userinput
if board == target {
println!("You win!");
break;
}
} //'mainloop
} | /// A simple struct for a board
struct Board {
/// The cells of the board | random_line_split |
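The flip logic above lends itself to a quick property check: toggling the same row or column twice must leave the board unchanged, and out-of-range indices must be rejected. The test below is an illustrative sketch against the `Board` type from this listing (test names are made up), not part of the original program.

```rust
#[cfg(test)]
mod board_tests {
    use super::Board;

    #[test]
    fn double_flip_restores_board() {
        let mut board = Board::new(3);
        // Each flip toggles every cell in the row, so two flips cancel out.
        assert!(board.fliprow(1));
        assert!(board.fliprow(1));
        assert!(board == Board::new(3));
    }

    #[test]
    fn out_of_range_flip_is_rejected() {
        let mut board = Board::new(3);
        // Valid indices are 0..size, so 3 is out of range on a 3x3 board.
        assert!(!board.fliprow(3));
        assert!(!board.flipcol(3));
    }
}
```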
main.rs | // For random generation
extern crate rand;
// For fmt::Display
use std::fmt;
// For I/O (stdin, stdout, etc)
use std::io::prelude::*;
use rand::Rng;
/// A simple struct for a board
struct Board {
/// The cells of the board
cells: Vec<bool>,
/// The size of the board
size: usize,
}
// Functions for the Board struct
impl Board {
/// Generate a new, empty board, of size >= 1
///
/// Returns a Board in the "off" state, where all cells are 0.
/// If a size of 0 is given, a Board of size 1 will be created instead.
/// A mutable board is required for using Board::fliprow and Board::flipcol functions.
///
/// ```
/// let mut board: Board = Board::new(3);
/// ```
fn new(size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size > 0 {
Board {
cells: vec![false; size * size],
size,
}
} else {
Board::new(1)
}
}
/// Flip the specified row
///
/// Returns true if the row is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.fliprow(1);
/// ```
fn fliprow(&mut self, row: usize) -> bool {
// Check constraints
if row >= self.size {
return false;
}
// Starting position in the vector
let start = row * self.size;
// Loop through the vector row
for i in start..start + self.size {
self.cells[i] = !self.cells[i];
}
true
}
/// Flip the specified column
///
/// Returns true if the column is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.flipcol(0);
/// ```
fn flipcol(&mut self, col: usize) -> bool {
// Check constraints
if col >= self.size {
return false;
}
// Loop through the vector column
for i in 0..self.size {
self.cells[col + i * self.size] = !self.cells[col + i * self.size];
}
true
}
/// Generate a random board
///
/// Returns a Board in a random state.
/// If a size of 0 is given, a Board of size 1 will be created instead.
///
/// ```
/// let target: Board = Board::random(3);
/// ```
fn random<R: Rng>(rng: &mut R, size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size == 0 {
return Board::random(rng, 1);
}
// Make a vector of the board size with random bits
let cells = (0..size * size)
.map(|_| rng.gen::<bool>())
.collect::<Vec<_>>();
// Return the random board
Board { cells, size }
}
}
impl PartialEq for Board {
fn eq(&self, rhs: &Board) -> bool {
self.cells == rhs.cells
}
}
// Implement the Display format, used with `print!("{}", &board);`
impl fmt::Display for Board {
// Example output:
// 0 1 2
// 0 0 1 0
// 1 1 0 0
// 2 0 1 1
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Get the string width of the size of the board
let width = (self.size - 1).to_string().len();
// Write the initial spaces (upper left)
write!(f, "{space: >0$}", width, space = " ")?;
// Write the column numbers
for i in 0..self.size {
write!(f, " {offset:>0$}", width, offset = i)?;
}
// Newline for rows
writeln!(f)?;
// Loop through the rows
for row in 0..self.size {
// Write the row number
write!(f, "{row:>0$}", width, row = row)?;
// Loop through the columns
for col in 0..self.size {
// Get the value of the cell as 1 or 0
let p = self.cells[row * self.size + col] as usize;
// Write the column value
write!(f, " {col:>0$}", width, col = p)?;
}
// Newline for next row
writeln!(f)?;
}
// Return Formatter result
Ok(())
}
}
fn main() {
let mut rng = rand::thread_rng();
// The board size
let size: usize = 3;
// The target board
let target: Board = Board::random(&mut rng, size);
// The user board
let mut board: Board = Board::new(size);
// How many moves taken
let mut moves: u32 = 0;
// Loop until win or quit
'mainloop: loop {
// User input
let mut input: String;
// Write the boards
println!("Target:\n{}\nBoard:\n{}", &target, &board);
// User input loop
'userinput: loop {
// Prompt
print!("\nFlip? [q|[r|c]#] ");
// Flush stdout to write the previous print, if we can't then exit
match std::io::stdout().flush() {
Ok(_) => {}
Err(e) => {
println!("Error: cannot flush stdout: {}", e);
break 'mainloop;
}
};
// Reset input for each loop
input = String::new();
// Read user input
match std::io::stdin().read_line(&mut input) {
Ok(_) => {
input = input.trim().to_string();
// Get the first character
let rc: char = match input.chars().next() {
Some(c) => c,
None => {
println!("Error: No input");
continue 'userinput;
}
};
// Make sure input is r, c, or q
if rc != 'r' && rc != 'c' && rc != 'q' {
println!("Error: '{}': Must use 'r'ow or 'c'olumn or 'q'uit", rc);
continue 'userinput;
}
// If input is q, exit game
if rc == 'q' {
println!("Thanks for playing!");
break 'mainloop;
}
// If input is r or c, get the number after
let n: usize = match input[1..].to_string().parse() {
Ok(x) => |
Err(_) => {
println!(
"Error: '{}': Unable to parse row or column number",
input[1..].to_string()
);
continue 'userinput;
}
};
// Flip the row or column specified
match rc {
'r' => board.fliprow(n),
'c' => board.flipcol(n),
_ => {
// We want to panic here because we should NEVER
// have anything other than 'r' or 'c' here
panic!("How did you end up here?");
}
};
// Increment moves
moves += 1;
println!("Moves taken: {}", moves);
break 'userinput;
}
Err(e) => {
println!("Error reading input: {}", e);
break 'mainloop;
}
}
} // 'userinput
if board == target {
println!("You win!");
break;
}
} //'mainloop
}
| {
// If we're within bounds, return the parsed number
if x < size {
x
} else {
println!(
"Error: Must specify a row or column within size({})",
size
);
continue 'userinput;
}
} | conditional_block |
main.rs | // For random generation
extern crate rand;
// For fmt::Display
use std::fmt;
// For I/O (stdin, stdout, etc)
use std::io::prelude::*;
use rand::Rng;
/// A simple struct for a board
struct Board {
/// The cells of the board
cells: Vec<bool>,
/// The size of the board
size: usize,
}
// Functions for the Board struct
impl Board {
/// Generate a new, empty board, of size >= 1
///
/// Returns a Board in the "off" state, where all cells are 0.
/// If a size of 0 is given, a Board of size 1 will be created instead.
/// A mutable board is required for using Board::fliprow and Board::flipcol functions.
///
/// ```
/// let mut board: Board = Board::new(3);
/// ```
fn new(size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size > 0 {
Board {
cells: vec![false; size * size],
size,
}
} else {
Board::new(1)
}
}
/// Flip the specified row
///
/// Returns true if the row is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.fliprow(1);
/// ```
fn fliprow(&mut self, row: usize) -> bool {
// Check constraints
if row >= self.size {
return false;
}
// Starting position in the vector
let start = row * self.size;
// Loop through the vector row
for i in start..start + self.size {
self.cells[i] = !self.cells[i];
}
true
}
/// Flip the specified column
///
/// Returns true if the column is within the size, false otherwise.
///
/// ```
/// let mut board: Board = Board::new(3);
/// board.flipcol(0);
/// ```
fn flipcol(&mut self, col: usize) -> bool |
/// Generate a random board
///
/// Returns a Board in a random state.
/// If a size of 0 is given, a Board of size 1 will be created instead.
///
/// ```
/// let target: Board = Board::random(3);
/// ```
fn random<R: Rng>(rng: &mut R, size: usize) -> Board {
// Ensure we make a board with a non-zero size
if size == 0 {
return Board::random(rng, 1);
}
// Make a vector of the board size with random bits
let cells = (0..size * size)
.map(|_| rng.gen::<bool>())
.collect::<Vec<_>>();
// Return the random board
Board { cells, size }
}
}
impl PartialEq for Board {
fn eq(&self, rhs: &Board) -> bool {
self.cells == rhs.cells
}
}
// Implement the Display format, used with `print!("{}", &board);`
impl fmt::Display for Board {
// Example output:
// 0 1 2
// 0 0 1 0
// 1 1 0 0
// 2 0 1 1
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Get the string width of the size of the board
let width = (self.size - 1).to_string().len();
// Write the initial spaces (upper left)
write!(f, "{space: >0$}", width, space = " ")?;
// Write the column numbers
for i in 0..self.size {
write!(f, " {offset:>0$}", width, offset = i)?;
}
// Newline for rows
writeln!(f)?;
// Loop through the rows
for row in 0..self.size {
// Write the row number
write!(f, "{row:>0$}", width, row = row)?;
// Loop through the columns
for col in 0..self.size {
// Get the value of the cell as 1 or 0
let p = self.cells[row * self.size + col] as usize;
// Write the column value
write!(f, " {col:>0$}", width, col = p)?;
}
// Newline for next row
writeln!(f)?;
}
// Return Formatter result
Ok(())
}
}
fn main() {
let mut rng = rand::thread_rng();
// The board size
let size: usize = 3;
// The target board
let target: Board = Board::random(&mut rng, size);
// The user board
let mut board: Board = Board::new(size);
// How many moves taken
let mut moves: u32 = 0;
// Loop until win or quit
'mainloop: loop {
// User input
let mut input: String;
// Write the boards
println!("Target:\n{}\nBoard:\n{}", &target, &board);
// User input loop
'userinput: loop {
// Prompt
print!("\nFlip? [q|[r|c]#] ");
// Flush stdout to write the previous print, if we can't then exit
match std::io::stdout().flush() {
Ok(_) => {}
Err(e) => {
println!("Error: cannot flush stdout: {}", e);
break 'mainloop;
}
};
// Reset input for each loop
input = String::new();
// Read user input
match std::io::stdin().read_line(&mut input) {
Ok(_) => {
input = input.trim().to_string();
// Get the first character
let rc: char = match input.chars().next() {
Some(c) => c,
None => {
println!("Error: No input");
continue 'userinput;
}
};
// Make sure input is r, c, or q
if rc != 'r' && rc != 'c' && rc != 'q' {
println!("Error: '{}': Must use 'r'ow or 'c'olumn or 'q'uit", rc);
continue 'userinput;
}
// If input is q, exit game
if rc == 'q' {
println!("Thanks for playing!");
break 'mainloop;
}
// If input is r or c, get the number after
let n: usize = match input[1..].to_string().parse() {
Ok(x) => {
// If we're within bounds, return the parsed number
if x < size {
x
} else {
println!(
"Error: Must specify a row or column within size({})",
size
);
continue 'userinput;
}
}
Err(_) => {
println!(
"Error: '{}': Unable to parse row or column number",
input[1..].to_string()
);
continue 'userinput;
}
};
// Flip the row or column specified
match rc {
'r' => board.fliprow(n),
'c' => board.flipcol(n),
_ => {
// We want to panic here because we should NEVER
// have anything other than 'r' or 'c' here
panic!("How did you end up here?");
}
};
// Increment moves
moves += 1;
println!("Moves taken: {}", moves);
break 'userinput;
}
Err(e) => {
println!("Error reading input: {}", e);
break 'mainloop;
}
}
} // 'userinput
if board == target {
println!("You win!");
break;
}
} //'mainloop
}
| {
// Check constraints
if col >= self.size {
return false;
}
// Loop through the vector column
for i in 0..self.size {
self.cells[col + i * self.size] = !self.cells[col + i * self.size];
}
true
} | identifier_body |
screenshot.rs | extern crate kiss3d;
extern crate nalgebra as na;
use std::path::Path;
use kiss3d::light::Light;
use kiss3d::window::Window;
use na::{UnitQuaternion, Vector3};
// Based on cube example.
fn main() | }
| {
let mut window = Window::new("Kiss3d: screenshot");
let mut c = window.add_cube(0.2, 0.2, 0.2);
c.set_color(1.0, 0.0, 0.0);
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.785));
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
-0.6f32,
));
window.set_light(Light::StickToCamera);
while window.render() {
let img = window.snap_image();
let img_path = Path::new("screenshot.png");
img.save(img_path).unwrap();
println!("Screeshot saved to `screenshot.png`");
break;
} | identifier_body |
screenshot.rs | extern crate kiss3d;
extern crate nalgebra as na;
use std::path::Path;
use kiss3d::light::Light;
use kiss3d::window::Window;
use na::{UnitQuaternion, Vector3};
// Based on cube example.
fn | () {
let mut window = Window::new("Kiss3d: screenshot");
let mut c = window.add_cube(0.2, 0.2, 0.2);
c.set_color(1.0, 0.0, 0.0);
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.785));
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
-0.6f32,
));
window.set_light(Light::StickToCamera);
while window.render() {
let img = window.snap_image();
let img_path = Path::new("screenshot.png");
img.save(img_path).unwrap();
println!("Screeshot saved to `screenshot.png`");
break;
}
}
| main | identifier_name |
screenshot.rs | extern crate kiss3d;
extern crate nalgebra as na;
use std::path::Path;
use kiss3d::light::Light;
use kiss3d::window::Window;
use na::{UnitQuaternion, Vector3};
// Based on cube example.
fn main() {
let mut window = Window::new("Kiss3d: screenshot");
let mut c = window.add_cube(0.2, 0.2, 0.2);
c.set_color(1.0, 0.0, 0.0);
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.785));
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
-0.6f32, | while window.render() {
let img = window.snap_image();
let img_path = Path::new("screenshot.png");
img.save(img_path).unwrap();
println!("Screeshot saved to `screenshot.png`");
break;
}
} | ));
window.set_light(Light::StickToCamera);
| random_line_split |
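The snapshot example above grabs one frame and exits. Keeping the loop running and saving periodically only needs the calls already used there (`render`, `snap_image`, `save`); the frame interval and file-name scheme below are assumptions for illustration, not part of the kiss3d example.

```rust
// Illustrative variation: keep rendering and save every 60th frame.
let mut frame: u32 = 0;
while window.render() {
    if frame % 60 == 0 {
        let img = window.snap_image();
        let name = format!("screenshot_{}.png", frame);
        img.save(Path::new(&name)).unwrap();
        println!("Screenshot saved to `{}`", name);
    }
    frame += 1;
}
```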
mask.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::relay_directive::RelayDirective;
use graphql_ir::{
FragmentDefinition, FragmentSpread, InlineFragment, OperationDefinition, Program, ScalarField,
Selection, Transformed, Transformer, VariableDefinition,
};
use indexmap::map::Entry;
use intern::string_key::StringKeyIndexMap;
use schema::Schema;
use std::{ops::RangeFull, sync::Arc};
/// Transform to inline fragment spreads with @relay(mask:false)
pub fn mask(program: &Program) -> Program {
let mut transform = Mask::new(program);
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
type JoinedArguments<'s> = StringKeyIndexMap<&'s VariableDefinition>;
struct Mask<'s> {
program: &'s Program,
current_reachable_arguments: Vec<&'s VariableDefinition>,
}
impl<'s> Mask<'s> {
fn new(program: &'s Program) -> Self {
Self {
program,
current_reachable_arguments: vec![],
} | joined_arguments.insert(variable.name.item, variable);
}
for arg in self.current_reachable_arguments.drain(..) {
match joined_arguments.entry(arg.name.item) {
Entry::Vacant(entry) => {
entry.insert(arg);
}
Entry::Occupied(mut entry) => {
let prev_arg = entry.get();
if self
.program
.schema
.is_type_subtype_of(&arg.type_, &prev_arg.type_)
{
entry.insert(arg);
}
}
}
}
let range = RangeFull;
fragment.used_global_variables = joined_arguments
.drain(range)
.map(|(_, v)| v)
.cloned()
.collect();
}
}
impl<'s> Transformer for Mask<'s> {
const NAME: &'static str = "MaskTransform";
const VISIT_ARGUMENTS: bool = false;
const VISIT_DIRECTIVES: bool = false;
fn transform_operation(
&mut self,
operation: &OperationDefinition,
) -> Transformed<OperationDefinition> {
let result = self.default_transform_operation(operation);
self.current_reachable_arguments.clear();
result
}
fn transform_fragment(
&mut self,
fragment: &FragmentDefinition,
) -> Transformed<FragmentDefinition> {
let result = self.default_transform_fragment(fragment);
if self.current_reachable_arguments.is_empty() {
result
} else {
Transformed::Replace(match result {
Transformed::Keep => {
let mut new_fragment = fragment.clone();
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Replace(mut new_fragment) => {
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Delete => {
panic!("Unexpected fragment deletion in mask transform.");
}
})
}
}
fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed<Selection> {
if RelayDirective::is_unmasked_fragment_spread(spread) {
let fragment = self.program.fragment(spread.fragment.item).unwrap();
self.current_reachable_arguments
.extend(&fragment.used_global_variables);
Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment {
type_condition: Some(fragment.type_condition),
directives: vec![],
selections: self
.transform_selections(&fragment.selections)
.replace_or_else(|| fragment.selections.to_vec()),
})))
} else {
Transformed::Keep
}
}
fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Selection> {
Transformed::Keep
}
} | }
fn join_current_arguments_to_fragment(&mut self, fragment: &mut FragmentDefinition) {
let mut joined_arguments = JoinedArguments::default();
for variable in &fragment.used_global_variables { | random_line_split |
mask.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::relay_directive::RelayDirective;
use graphql_ir::{
FragmentDefinition, FragmentSpread, InlineFragment, OperationDefinition, Program, ScalarField,
Selection, Transformed, Transformer, VariableDefinition,
};
use indexmap::map::Entry;
use intern::string_key::StringKeyIndexMap;
use schema::Schema;
use std::{ops::RangeFull, sync::Arc};
/// Transform to inline fragment spreads with @relay(mask:false)
pub fn mask(program: &Program) -> Program {
let mut transform = Mask::new(program);
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
type JoinedArguments<'s> = StringKeyIndexMap<&'s VariableDefinition>;
struct Mask<'s> {
program: &'s Program,
current_reachable_arguments: Vec<&'s VariableDefinition>,
}
impl<'s> Mask<'s> {
fn new(program: &'s Program) -> Self {
Self {
program,
current_reachable_arguments: vec![],
}
}
fn join_current_arguments_to_fragment(&mut self, fragment: &mut FragmentDefinition) {
let mut joined_arguments = JoinedArguments::default();
for variable in &fragment.used_global_variables {
joined_arguments.insert(variable.name.item, variable);
}
for arg in self.current_reachable_arguments.drain(..) {
match joined_arguments.entry(arg.name.item) {
Entry::Vacant(entry) => {
entry.insert(arg);
}
Entry::Occupied(mut entry) => {
let prev_arg = entry.get();
if self
.program
.schema
.is_type_subtype_of(&arg.type_, &prev_arg.type_)
{
entry.insert(arg);
}
}
}
}
let range = RangeFull;
fragment.used_global_variables = joined_arguments
.drain(range)
.map(|(_, v)| v)
.cloned()
.collect();
}
}
impl<'s> Transformer for Mask<'s> {
const NAME: &'static str = "MaskTransform";
const VISIT_ARGUMENTS: bool = false;
const VISIT_DIRECTIVES: bool = false;
fn transform_operation(
&mut self,
operation: &OperationDefinition,
) -> Transformed<OperationDefinition> |
fn transform_fragment(
&mut self,
fragment: &FragmentDefinition,
) -> Transformed<FragmentDefinition> {
let result = self.default_transform_fragment(fragment);
if self.current_reachable_arguments.is_empty() {
result
} else {
Transformed::Replace(match result {
Transformed::Keep => {
let mut new_fragment = fragment.clone();
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Replace(mut new_fragment) => {
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Delete => {
panic!("Unexpected fragment deletion in mask transform.");
}
})
}
}
fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed<Selection> {
if RelayDirective::is_unmasked_fragment_spread(spread) {
let fragment = self.program.fragment(spread.fragment.item).unwrap();
self.current_reachable_arguments
.extend(&fragment.used_global_variables);
Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment {
type_condition: Some(fragment.type_condition),
directives: vec![],
selections: self
.transform_selections(&fragment.selections)
.replace_or_else(|| fragment.selections.to_vec()),
})))
} else {
Transformed::Keep
}
}
fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Selection> {
Transformed::Keep
}
}
| {
let result = self.default_transform_operation(operation);
self.current_reachable_arguments.clear();
result
} | identifier_body |
mask.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::relay_directive::RelayDirective;
use graphql_ir::{
FragmentDefinition, FragmentSpread, InlineFragment, OperationDefinition, Program, ScalarField,
Selection, Transformed, Transformer, VariableDefinition,
};
use indexmap::map::Entry;
use intern::string_key::StringKeyIndexMap;
use schema::Schema;
use std::{ops::RangeFull, sync::Arc};
/// Transform to inline fragment spreads with @relay(mask:false)
pub fn mask(program: &Program) -> Program {
let mut transform = Mask::new(program);
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
type JoinedArguments<'s> = StringKeyIndexMap<&'s VariableDefinition>;
struct Mask<'s> {
program: &'s Program,
current_reachable_arguments: Vec<&'s VariableDefinition>,
}
impl<'s> Mask<'s> {
fn new(program: &'s Program) -> Self {
Self {
program,
current_reachable_arguments: vec![],
}
}
fn join_current_arguments_to_fragment(&mut self, fragment: &mut FragmentDefinition) {
let mut joined_arguments = JoinedArguments::default();
for variable in &fragment.used_global_variables {
joined_arguments.insert(variable.name.item, variable);
}
for arg in self.current_reachable_arguments.drain(..) {
match joined_arguments.entry(arg.name.item) {
Entry::Vacant(entry) => {
entry.insert(arg);
}
Entry::Occupied(mut entry) => {
let prev_arg = entry.get();
if self
.program
.schema
.is_type_subtype_of(&arg.type_, &prev_arg.type_)
|
}
}
}
let range = RangeFull;
fragment.used_global_variables = joined_arguments
.drain(range)
.map(|(_, v)| v)
.cloned()
.collect();
}
}
impl<'s> Transformer for Mask<'s> {
const NAME: &'static str = "MaskTransform";
const VISIT_ARGUMENTS: bool = false;
const VISIT_DIRECTIVES: bool = false;
fn transform_operation(
&mut self,
operation: &OperationDefinition,
) -> Transformed<OperationDefinition> {
let result = self.default_transform_operation(operation);
self.current_reachable_arguments.clear();
result
}
fn transform_fragment(
&mut self,
fragment: &FragmentDefinition,
) -> Transformed<FragmentDefinition> {
let result = self.default_transform_fragment(fragment);
if self.current_reachable_arguments.is_empty() {
result
} else {
Transformed::Replace(match result {
Transformed::Keep => {
let mut new_fragment = fragment.clone();
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Replace(mut new_fragment) => {
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Delete => {
panic!("Unexpected fragment deletion in mask transform.");
}
})
}
}
fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed<Selection> {
if RelayDirective::is_unmasked_fragment_spread(spread) {
let fragment = self.program.fragment(spread.fragment.item).unwrap();
self.current_reachable_arguments
.extend(&fragment.used_global_variables);
Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment {
type_condition: Some(fragment.type_condition),
directives: vec![],
selections: self
.transform_selections(&fragment.selections)
.replace_or_else(|| fragment.selections.to_vec()),
})))
} else {
Transformed::Keep
}
}
fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Selection> {
Transformed::Keep
}
}
| {
entry.insert(arg);
} | conditional_block |
mask.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::relay_directive::RelayDirective;
use graphql_ir::{
FragmentDefinition, FragmentSpread, InlineFragment, OperationDefinition, Program, ScalarField,
Selection, Transformed, Transformer, VariableDefinition,
};
use indexmap::map::Entry;
use intern::string_key::StringKeyIndexMap;
use schema::Schema;
use std::{ops::RangeFull, sync::Arc};
/// Transform to inline fragment spreads with @relay(mask:false)
pub fn mask(program: &Program) -> Program {
let mut transform = Mask::new(program);
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
type JoinedArguments<'s> = StringKeyIndexMap<&'s VariableDefinition>;
struct Mask<'s> {
program: &'s Program,
current_reachable_arguments: Vec<&'s VariableDefinition>,
}
impl<'s> Mask<'s> {
fn new(program: &'s Program) -> Self {
Self {
program,
current_reachable_arguments: vec![],
}
}
fn | (&mut self, fragment: &mut FragmentDefinition) {
let mut joined_arguments = JoinedArguments::default();
for variable in &fragment.used_global_variables {
joined_arguments.insert(variable.name.item, variable);
}
for arg in self.current_reachable_arguments.drain(..) {
match joined_arguments.entry(arg.name.item) {
Entry::Vacant(entry) => {
entry.insert(arg);
}
Entry::Occupied(mut entry) => {
let prev_arg = entry.get();
if self
.program
.schema
.is_type_subtype_of(&arg.type_, &prev_arg.type_)
{
entry.insert(arg);
}
}
}
}
let range = RangeFull;
fragment.used_global_variables = joined_arguments
.drain(range)
.map(|(_, v)| v)
.cloned()
.collect();
}
}
impl<'s> Transformer for Mask<'s> {
const NAME: &'static str = "MaskTransform";
const VISIT_ARGUMENTS: bool = false;
const VISIT_DIRECTIVES: bool = false;
fn transform_operation(
&mut self,
operation: &OperationDefinition,
) -> Transformed<OperationDefinition> {
let result = self.default_transform_operation(operation);
self.current_reachable_arguments.clear();
result
}
fn transform_fragment(
&mut self,
fragment: &FragmentDefinition,
) -> Transformed<FragmentDefinition> {
let result = self.default_transform_fragment(fragment);
if self.current_reachable_arguments.is_empty() {
result
} else {
Transformed::Replace(match result {
Transformed::Keep => {
let mut new_fragment = fragment.clone();
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Replace(mut new_fragment) => {
self.join_current_arguments_to_fragment(&mut new_fragment);
new_fragment
}
Transformed::Delete => {
panic!("Unexpected fragment deletion in mask transform.");
}
})
}
}
fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed<Selection> {
if RelayDirective::is_unmasked_fragment_spread(spread) {
let fragment = self.program.fragment(spread.fragment.item).unwrap();
self.current_reachable_arguments
.extend(&fragment.used_global_variables);
Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment {
type_condition: Some(fragment.type_condition),
directives: vec![],
selections: self
.transform_selections(&fragment.selections)
.replace_or_else(|| fragment.selections.to_vec()),
})))
} else {
Transformed::Keep
}
}
fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Selection> {
Transformed::Keep
}
}
| join_current_arguments_to_fragment | identifier_name |
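The argument-joining step in `join_current_arguments_to_fragment` keeps, per variable name, the most specific type seen across the unmasked spreads (the `is_type_subtype_of` branch). The sketch below isolates that merge rule with plain standard-library types and a toy subtype check; it is a stand-in for illustration and does not use the real `graphql_ir` or `schema` APIs.

```rust
use std::collections::HashMap;

// Toy stand-in for a variable definition: a name plus its type rendered as a string.
#[derive(Clone, Debug)]
struct Var {
    name: String,
    ty: String,
}

// Hypothetical subtype check; in the real transform this is Schema::is_type_subtype_of.
fn is_subtype(sub: &str, sup: &str) -> bool {
    sub == sup || (sup == "ID" && sub == "ID!")
}

// Merge newly reachable variables into the fragment's map, preferring subtypes,
// mirroring the Entry::Vacant / Entry::Occupied branches above.
fn join(existing: &mut HashMap<String, Var>, reachable: Vec<Var>) {
    for var in reachable {
        let replace = match existing.get(&var.name) {
            None => true,
            Some(prev) => is_subtype(&var.ty, &prev.ty),
        };
        if replace {
            existing.insert(var.name.clone(), var);
        }
    }
}
```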
word2vec.rs |
extern crate vect;
extern crate argparse;
use vect::termcounts;
use vect::ingestion;
use vect::dictionary::Dictionary;
use argparse::{ArgumentParser, Store};
use vect::huffman;
use vect::base::split_words;
use std::io;
// This is labeled public to shut up the linter about dead code
pub fn main() {
let mut input_filename = "word2vec_input".to_string();
let mut output_filename = "word2vec_termcounts".to_string();
{ // Limit the scope of ap.refer()
let mut ap = ArgumentParser::new();
ap.set_description("Create vanilla word2vec vectors for a given dataset.");
ap.refer(&mut input_filename)
.add_argument("input_filename", Store,
"Input corpus, formatted one document per line");
ap.refer(&mut output_filename)
.add_argument("output", Store,
"Output term count table");
ap.parse_args_or_exit();
}
let input_file = ingestion::ingest_lines(&input_filename).ok()
.expect("Failed to load input file. Is it missing?");
let counts = termcounts::count_terms(1, input_file).ok()
.expect("Problems while reading the input file.");
let tree = huffman::create_huffman_tree(&counts);
termcounts::export_dictionary(&counts, &output_filename);
let mut dictionary = Dictionary::new(&counts);
train(&mut dictionary, &input_filename);
//println!("Huffman Tree: {:?}", huffman::create_huffman_tree(&term_counts));
}
fn train(dictionary: &mut Dictionary, input_filename: &str) -> io::Result<()> | {
for line in try!(ingestion::ingest_lines(&input_filename)) {
let line = try!(line);
let words = split_words(&line);
for i in 0..words.len()-1 {
dictionary.update_both(&words[i], &words[i+1]);
}
}
Ok(())
} | identifier_body |
|
word2vec.rs | extern crate vect;
extern crate argparse;
use vect::termcounts;
use vect::ingestion;
use vect::dictionary::Dictionary;
use argparse::{ArgumentParser, Store};
use vect::huffman;
use vect::base::split_words;
use std::io;
| // This is labeled public to shut up the linter about dead code
pub fn main() {
let mut input_filename = "word2vec_input".to_string();
let mut output_filename = "word2vec_termcounts".to_string();
{ // Limit the scope of ap.refer()
let mut ap = ArgumentParser::new();
ap.set_description("Create vanilla word2vec vectors for a given dataset.");
ap.refer(&mut input_filename)
.add_argument("input_filename", Store,
"Input corpus, formatted one document per line");
ap.refer(&mut output_filename)
.add_argument("output", Store,
"Output term count table");
ap.parse_args_or_exit();
}
let input_file = ingestion::ingest_lines(&input_filename).ok()
.expect("Failed to load input file. Is it missing?");
let counts = termcounts::count_terms(1, input_file).ok()
.expect("Problems while reading the input file.");
let tree = huffman::create_huffman_tree(&counts);
termcounts::export_dictionary(&counts, &output_filename);
let mut dictionary = Dictionary::new(&counts);
train(&mut dictionary, &input_filename);
//println!("Huffman Tree: {:?}", huffman::create_huffman_tree(&term_counts));
}
fn train(dictionary: &mut Dictionary, input_filename: &str) -> io::Result<()> {
for line in try!(ingestion::ingest_lines(&input_filename)) {
let line = try!(line);
let words = split_words(&line);
for i in 0..words.len()-1 {
dictionary.update_both(&words[i], &words[i+1]);
}
}
Ok(())
} | random_line_split |
|
word2vec.rs |
extern crate vect;
extern crate argparse;
use vect::termcounts;
use vect::ingestion;
use vect::dictionary::Dictionary;
use argparse::{ArgumentParser, Store};
use vect::huffman;
use vect::base::split_words;
use std::io;
// This is labeled public to shut up the linter about dead code
pub fn main() {
let mut input_filename = "word2vec_input".to_string();
let mut output_filename = "word2vec_termcounts".to_string();
{ // Limit the scope of ap.refer()
let mut ap = ArgumentParser::new();
ap.set_description("Create vanilla word2vec vectors for a given dataset.");
ap.refer(&mut input_filename)
.add_argument("input_filename", Store,
"Input corpus, formatted one document per line");
ap.refer(&mut output_filename)
.add_argument("output", Store,
"Output term count table");
ap.parse_args_or_exit();
}
let input_file = ingestion::ingest_lines(&input_filename).ok()
.expect("Failed to load input file. Is it missing?");
let counts = termcounts::count_terms(1, input_file).ok()
.expect("Problems while reading the input file.");
let tree = huffman::create_huffman_tree(&counts);
termcounts::export_dictionary(&counts, &output_filename);
let mut dictionary = Dictionary::new(&counts);
train(&mut dictionary, &input_filename);
//println!("Huffman Tree: {:?}", huffman::create_huffman_tree(&term_counts));
}
fn | (dictionary: &mut Dictionary, input_filename: &str) -> io::Result<()> {
for line in try!(ingestion::ingest_lines(&input_filename)) {
let line = try!(line);
let words = split_words(&line);
for i in 0..words.len()-1 {
dictionary.update_both(&words[i], &words[i+1]);
}
}
Ok(())
}
| train | identifier_name |
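The `train` loop above is a window-1 co-occurrence pass: every pair of adjacent words in a line updates the dictionary. The function below shows the same pass with a plain `HashMap` standing in for `Dictionary::update_both`; the `vect` crate types are not modeled here.

```rust
use std::collections::HashMap;

// Simplified stand-in for the adjacent-pair updates done by Dictionary::update_both.
fn count_pairs(lines: &[&str]) -> HashMap<(String, String), u32> {
    let mut pairs = HashMap::new();
    for line in lines {
        let words: Vec<&str> = line.split_whitespace().collect();
        // saturating_sub guards the empty-line case, where `len() - 1` would underflow.
        for i in 0..words.len().saturating_sub(1) {
            let key = (words[i].to_string(), words[i + 1].to_string());
            *pairs.entry(key).or_insert(0) += 1;
        }
    }
    pairs
}
```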
arbitrary_self_types_pointers_and_wrappers.rs | // run-pass
#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
#![feature(rustc_attrs)]
use std::{
ops::{Deref, CoerceUnsized, DispatchFromDyn},
marker::Unsize,
};
struct Ptr<T: ?Sized>(Box<T>);
impl<T: ?Sized> Deref for Ptr<T> {
type Target = T;
fn deref(&self) -> &T {
&*self.0
}
}
impl<T: Unsize<U> + ?Sized, U: ?Sized> CoerceUnsized<Ptr<U>> for Ptr<T> {}
impl<T: Unsize<U> + ?Sized, U: ?Sized> DispatchFromDyn<Ptr<U>> for Ptr<T> {}
struct Wrapper<T: ?Sized>(T);
impl<T: ?Sized> Deref for Wrapper<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T: CoerceUnsized<U>, U> CoerceUnsized<Wrapper<U>> for Wrapper<T> {}
impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
trait Trait {
// This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
// without unsized_locals), but wrappers around `Self` currently are not.
// FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
// fn wrapper(self: Wrapper<Self>) -> i32;
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32;
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32;
}
impl Trait for i32 {
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32 {
**self
}
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32 {
**self
}
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32 {
***self
}
}
fn main() {
let pw = Ptr(Box::new(Wrapper(5))) as Ptr<Wrapper<dyn Trait>>;
assert_eq!(pw.ptr_wrapper(), 5);
let wp = Wrapper(Ptr(Box::new(6))) as Wrapper<Ptr<dyn Trait>>;
assert_eq!(wp.wrapper_ptr(), 6);
let wpw = Wrapper(Ptr(Box::new(Wrapper(7)))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
assert_eq!(wpw.wrapper_ptr_wrapper(), 7); | } | random_line_split |
|
arbitrary_self_types_pointers_and_wrappers.rs | // run-pass
#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
#![feature(rustc_attrs)]
use std::{
ops::{Deref, CoerceUnsized, DispatchFromDyn},
marker::Unsize,
};
struct Ptr<T: ?Sized>(Box<T>);
impl<T: ?Sized> Deref for Ptr<T> {
type Target = T;
fn deref(&self) -> &T |
}
impl<T: Unsize<U> + ?Sized, U: ?Sized> CoerceUnsized<Ptr<U>> for Ptr<T> {}
impl<T: Unsize<U> + ?Sized, U: ?Sized> DispatchFromDyn<Ptr<U>> for Ptr<T> {}
struct Wrapper<T: ?Sized>(T);
impl<T: ?Sized> Deref for Wrapper<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T: CoerceUnsized<U>, U> CoerceUnsized<Wrapper<U>> for Wrapper<T> {}
impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
trait Trait {
// This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
// without unsized_locals), but wrappers around `Self` currently are not.
// FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
// fn wrapper(self: Wrapper<Self>) -> i32;
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32;
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32;
}
impl Trait for i32 {
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32 {
**self
}
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32 {
**self
}
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32 {
***self
}
}
fn main() {
let pw = Ptr(Box::new(Wrapper(5))) as Ptr<Wrapper<dyn Trait>>;
assert_eq!(pw.ptr_wrapper(), 5);
let wp = Wrapper(Ptr(Box::new(6))) as Wrapper<Ptr<dyn Trait>>;
assert_eq!(wp.wrapper_ptr(), 6);
let wpw = Wrapper(Ptr(Box::new(Wrapper(7)))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
assert_eq!(wpw.wrapper_ptr_wrapper(), 7);
}
| {
&*self.0
} | identifier_body |
arbitrary_self_types_pointers_and_wrappers.rs | // run-pass
#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
#![feature(rustc_attrs)]
use std::{
ops::{Deref, CoerceUnsized, DispatchFromDyn},
marker::Unsize,
};
struct Ptr<T: ?Sized>(Box<T>);
impl<T: ?Sized> Deref for Ptr<T> {
type Target = T;
fn deref(&self) -> &T {
&*self.0
}
}
impl<T: Unsize<U> + ?Sized, U: ?Sized> CoerceUnsized<Ptr<U>> for Ptr<T> {}
impl<T: Unsize<U> + ?Sized, U: ?Sized> DispatchFromDyn<Ptr<U>> for Ptr<T> {}
struct  | <T: ?Sized>(T);
impl<T: ?Sized> Deref for Wrapper<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T: CoerceUnsized<U>, U> CoerceUnsized<Wrapper<U>> for Wrapper<T> {}
impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
trait Trait {
// This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
// without unsized_locals), but wrappers around `Self` currently are not.
// FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
// fn wrapper(self: Wrapper<Self>) -> i32;
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32;
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32;
}
impl Trait for i32 {
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32 {
**self
}
fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32 {
**self
}
fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32 {
***self
}
}
fn main() {
let pw = Ptr(Box::new(Wrapper(5))) as Ptr<Wrapper<dyn Trait>>;
assert_eq!(pw.ptr_wrapper(), 5);
let wp = Wrapper(Ptr(Box::new(6))) as Wrapper<Ptr<dyn Trait>>;
assert_eq!(wp.wrapper_ptr(), 6);
let wpw = Wrapper(Ptr(Box::new(Wrapper(7)))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
assert_eq!(wpw.wrapper_ptr_wrapper(), 7);
}
| Wrapper | identifier_name |
unboxed-closures-by-ref.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
// Test by-ref capture of environment in unboxed closure types
fn call_fn<F: Fn()>(f: F) {
f()
}
fn call_fn_mut<F: FnMut()>(mut f: F) { | f()
}
fn main() {
let mut x = 0u;
let y = 2u;
call_fn(|&:| assert_eq!(x, 0));
call_fn_mut(|&mut:| x += y);
call_fn_once(|:| x += y);
assert_eq!(x, y * 2);
} | f()
}
fn call_fn_once<F: FnOnce()>(f: F) { | random_line_split |
unboxed-closures-by-ref.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
// Test by-ref capture of environment in unboxed closure types
fn call_fn<F: Fn()>(f: F) {
f()
}
fn call_fn_mut<F: FnMut()>(mut f: F) |
fn call_fn_once<F: FnOnce()>(f: F) {
f()
}
fn main() {
let mut x = 0u;
let y = 2u;
call_fn(|&:| assert_eq!(x, 0));
call_fn_mut(|&mut:| x += y);
call_fn_once(|:| x += y);
assert_eq!(x, y * 2);
}
| {
f()
} | identifier_body |
unboxed-closures-by-ref.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
// Test by-ref capture of environment in unboxed closure types
fn call_fn<F: Fn()>(f: F) {
f()
}
fn | <F: FnMut()>(mut f: F) {
f()
}
fn call_fn_once<F: FnOnce()>(f: F) {
f()
}
fn main() {
let mut x = 0u;
let y = 2u;
call_fn(|&:| assert_eq!(x, 0));
call_fn_mut(|&mut:| x += y);
call_fn_once(|:| x += y);
assert_eq!(x, y * 2);
}
| call_fn_mut | identifier_name |
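The closure syntax above (`|&:|`, `|&mut:|`, `|:|`, and unsuffixed `0u`) is pre-1.0 Rust, where the capture kind was written explicitly. A rough modern equivalent of the same test, which should build on current stable Rust, is:

```rust
fn call_fn<F: Fn()>(f: F) { f() }
fn call_fn_mut<F: FnMut()>(mut f: F) { f() }
fn call_fn_once<F: FnOnce()>(f: F) { f() }

fn main() {
    let mut x = 0usize;
    let y = 2usize;
    call_fn(|| assert_eq!(x, 0)); // captured by shared reference
    call_fn_mut(|| x += y);       // captured by mutable reference
    call_fn_once(|| x += y);      // may consume its captures
    assert_eq!(x, y * 2);
}
```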
authserver.rs |
pub struct AuthServer {
cipher: Cipher
}
impl AuthServer {
fn new(cipher: &Cipher) -> AuthServer {
AuthServer {
cipher: cipher
}
}
fn authenticate(&self, auth_string: &Vec<u8>) -> Response {
}
}
impl Receiver for AuthServer {
fn receive(method: &str, data: &Vec<u8>) -> Response {
match method {
"authenticate" => self.authenticate(&data),
_ => mkerr!(...)
}
}
}
#[derive(Show)]
enum Role {
user,
admin
}
impl FromStr for Role {
fn from_str(s: &str) -> Result<Self, err::Error> {
match s {
"user" => Self::user,
"admin" => Self::admin,
_ => mkerr!(format!("not a valid role: {}", s))
}
}
}
struct User {
email: String,
uid: u32,
role: Role
}
impl User {
fn new(email: &str, uid: u32, role: &Role) -> User |
fn encode(&self) -> Result<String, err::Error> {
Ok(try!(url::encode(&vec![
("email", &self.email),
("uid", self.uid),
("role", &format!("{:?}", self)])))
}
fn decode(param_string: &str) -> Result<User, err::Error> {
let params = try!(url::decode(&param_string));
Ok(User {
email: params[0][1],
uid: params[1][1].parse::<u32>(),
role:
}
}
| {
User {
email: email.clone(),
uid: uid,
role: role
}
} | identifier_body |
authserver.rs | pub struct AuthServer {
cipher: Cipher
}
impl AuthServer { | fn new(cipher: &Cipher) -> AuthServer {
AuthServer {
cipher: cipher
}
}
fn authenticate(&self, auth_string: &Vec<u8>) -> Response {
}
}
impl Receiver for AuthServer {
fn receive(method: &str, data: &Vec<u8>) -> Response {
match method {
"authenticate" => self.authenticate(&data),
_ => mkerr!(...)
}
}
}
#[derive(Show)]
enum Role {
user,
admin
}
impl FromStr for Role {
fn from_str(s: &str) -> Result<Self, err::Error> {
match s {
"user" => Self::user,
"admin" => Self::admin,
_ => mkerr!(format!("not a valid role: {}", s))
}
}
}
struct User {
email: String,
uid: u32,
role: Role
}
impl User {
fn new(email: &str, uid: u32, role: &Role) -> User {
User {
email: email.clone(),
uid: uid,
role: role
}
}
fn encode(&self) -> Result<String, err::Error> {
Ok(try!(url::encode(&vec![
("email", &self.email),
("uid", self.uid),
("role", &format!("{:?}", self)])))
}
fn decode(param_string: &str) -> Result<User, err::Error> {
let params = try!(url::decode(&param_string));
Ok(User {
email: params[0][1],
uid: params[1][1].parse::<u32>(),
role:
}
} | random_line_split |
|
authserver.rs |
pub struct | {
cipher: Cipher
}
impl AuthServer {
fn new(cipher: &Cipher) -> AuthServer {
AuthServer {
cipher: cipher
}
}
fn authenticate(&self, auth_string: &Vec<u8>) -> Response {
}
}
impl Receiver for AuthServer {
fn receive(method: &str, data: &Vec<u8>) -> Response {
match method {
"authenticate" => self.authenticate(&data),
_ => mkerr!(...)
}
}
}
#[derive(Show)]
enum Role {
user,
admin
}
impl FromStr for Role {
fn from_str(s: &str) -> Result<Self, err::Error> {
match s {
"user" => Self::user,
"admin" => Self::admin,
_ => mkerr!(format!("not a valid role: {}", s))
}
}
}
struct User {
email: String,
uid: u32,
role: Role
}
impl User {
fn new(email: &str, uid: u32, role: &Role) -> User {
User {
email: email.clone(),
uid: uid,
role: role
}
}
fn encode(&self) -> Result<String, err::Error> {
Ok(try!(url::encode(&vec![
("email", &self.email),
("uid", self.uid),
("role", &format!("{:?}", self)])))
}
fn decode(param_string: &str) -> Result<User, err::Error> {
let params = try!(url::decode(&param_string));
Ok(User {
email: params[0][1],
uid: params[1][1].parse::<u32>(),
role:
}
}
| AuthServer | identifier_name |
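The `User::decode` body in this file is left unfinished (the `role` field and closing delimiters are missing), and `url`, `err::Error`, and `mkerr!` are project-specific helpers that are not shown. Purely as an illustration, a self-contained decoder over the same `User` and `Role` shapes could look like the sketch below; everything here besides those two types is an assumption.

```rust
use std::collections::HashMap;

// Hypothetical stand-in for url::decode + User::decode, using only std.
fn decode_user(param_string: &str) -> Result<User, String> {
    let params: HashMap<&str, &str> = param_string
        .split('&')
        .filter_map(|pair| pair.split_once('='))
        .collect();
    let email = params.get("email").ok_or("missing email")?.to_string();
    let uid = params
        .get("uid")
        .ok_or("missing uid")?
        .parse::<u32>()
        .map_err(|e| e.to_string())?;
    let role = match *params.get("role").ok_or("missing role")? {
        "user" => Role::user,
        "admin" => Role::admin,
        other => return Err(format!("not a valid role: {}", other)),
    };
    Ok(User { email, uid, role })
}
```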
font_collection.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use font_descriptor;
use font_descriptor::{CTFontDescriptor, CTFontDescriptorCreateMatchingFontDescriptors};
use font_manager::CTFontManagerCopyAvailableFontFamilyNames;
use core_foundation::array::{CFArray, CFArrayRef};
use core_foundation::base::{CFRelease, CFRetain, CFTypeID, CFTypeRef, TCFType};
use core_foundation::dictionary::{CFDictionary, CFDictionaryRef};
use core_foundation::number::CFNumber;
use core_foundation::set::CFSet;
use core_foundation::string::{CFString, CFStringRef};
use libc::c_void;
use std::mem;
use std::ptr;
#[repr(C)]
pub struct __CTFontCollection(c_void);
pub type CTFontCollectionRef = *const __CTFontCollection;
#[derive(Debug)]
pub struct CTFontCollection {
obj: CTFontCollectionRef,
}
impl Drop for CTFontCollection {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CTFontCollectionRef> for CTFontCollection {
#[inline]
fn as_concrete_TypeRef(&self) -> CTFontCollectionRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CTFontCollectionRef) -> CTFontCollection {
let reference: CTFontCollectionRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
unsafe fn wrap_under_create_rule(obj: CTFontCollectionRef) -> CTFontCollection {
CTFontCollection {
obj: obj,
}
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
#[inline]
fn type_id() -> CFTypeID {
unsafe {
CTFontCollectionGetTypeID()
}
}
}
impl CTFontCollection {
pub fn get_descriptors(&self) -> CFArray {
// surprise! this function follows the Get rule, despite being named *Create*.
// So we have to addRef it to avoid CTFontCollection from double freeing it later.
unsafe {
TCFType::wrap_under_get_rule(CTFontCollectionCreateMatchingFontDescriptors(self.obj))
}
}
}
pub fn new_from_descriptors(descs: &CFArray) -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateWithFontDescriptors(descs.as_concrete_TypeRef(),
options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_all_families() -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateFromAvailableFonts(options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_family(family: &str) -> Option<CTFontCollection> {
use font_descriptor::kCTFontFamilyNameAttribute;
unsafe {
let family_attr: CFString = TCFType::wrap_under_get_rule(kCTFontFamilyNameAttribute);
let family_name: CFString = family.parse().unwrap();
let specified_attrs = CFDictionary::from_CFType_pairs(&[
(family_attr.as_CFType(), family_name.as_CFType())
]);
let wildcard_desc: CTFontDescriptor =
font_descriptor::new_from_attributes(&specified_attrs);
let mandatory_attrs = CFSet::from_slice(&[ family_attr.as_CFType() ]);
let matched_descs = CTFontDescriptorCreateMatchingFontDescriptors(
wildcard_desc.as_concrete_TypeRef(),
mandatory_attrs.as_concrete_TypeRef());
if matched_descs == ptr::null() {
return None;
}
let matched_descs: CFArray = TCFType::wrap_under_create_rule(matched_descs);
// I suppose one doesn't even need the CTFontCollection object at this point.
// But we stick descriptors into and out of it just to provide a nice wrapper API.
Some(new_from_descriptors(&matched_descs))
}
}
pub fn get_family_names() -> CFArray |
extern {
/*
* CTFontCollection.h
*/
static kCTFontCollectionRemoveDuplicatesOption: CFStringRef;
//fn CTFontCollectionCreateCopyWithFontDescriptors(original: CTFontCollectionRef,
// descriptors: CFArrayRef,
// options: CFDictionaryRef) -> CTFontCollectionRef;
fn CTFontCollectionCreateFromAvailableFonts(options: CFDictionaryRef) -> CTFontCollectionRef;
// this stupid function doesn't actually do any wildcard expansion;
// it just chooses the best match. Use
// CTFontDescriptorCreateMatchingDescriptors instead.
fn CTFontCollectionCreateMatchingFontDescriptors(collection: CTFontCollectionRef) -> CFArrayRef;
fn CTFontCollectionCreateWithFontDescriptors(descriptors: CFArrayRef,
options: CFDictionaryRef) -> CTFontCollectionRef;
//fn CTFontCollectionCreateMatchingFontDescriptorsSortedWithCallback;
fn CTFontCollectionGetTypeID() -> CFTypeID;
}
| {
unsafe {
TCFType::wrap_under_create_rule(CTFontManagerCopyAvailableFontFamilyNames())
}
} | identifier_body |
font_collection.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use font_descriptor;
use font_descriptor::{CTFontDescriptor, CTFontDescriptorCreateMatchingFontDescriptors};
use font_manager::CTFontManagerCopyAvailableFontFamilyNames;
use core_foundation::array::{CFArray, CFArrayRef};
use core_foundation::base::{CFRelease, CFRetain, CFTypeID, CFTypeRef, TCFType};
use core_foundation::dictionary::{CFDictionary, CFDictionaryRef};
use core_foundation::number::CFNumber;
use core_foundation::set::CFSet;
use core_foundation::string::{CFString, CFStringRef};
use libc::c_void;
use std::mem;
use std::ptr;
#[repr(C)]
pub struct __CTFontCollection(c_void);
pub type CTFontCollectionRef = *const __CTFontCollection;
#[derive(Debug)]
pub struct CTFontCollection {
obj: CTFontCollectionRef,
}
impl Drop for CTFontCollection {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CTFontCollectionRef> for CTFontCollection {
#[inline]
fn as_concrete_TypeRef(&self) -> CTFontCollectionRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CTFontCollectionRef) -> CTFontCollection {
let reference: CTFontCollectionRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
unsafe fn wrap_under_create_rule(obj: CTFontCollectionRef) -> CTFontCollection {
CTFontCollection {
obj: obj,
}
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
#[inline]
fn type_id() -> CFTypeID {
unsafe {
CTFontCollectionGetTypeID()
}
}
}
impl CTFontCollection {
pub fn get_descriptors(&self) -> CFArray {
// surprise! this function follows the Get rule, despite being named *Create*.
// So we have to addRef it to avoid CTFontCollection from double freeing it later.
unsafe {
TCFType::wrap_under_get_rule(CTFontCollectionCreateMatchingFontDescriptors(self.obj))
}
}
}
pub fn new_from_descriptors(descs: &CFArray) -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateWithFontDescriptors(descs.as_concrete_TypeRef(),
options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_all_families() -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateFromAvailableFonts(options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_family(family: &str) -> Option<CTFontCollection> {
use font_descriptor::kCTFontFamilyNameAttribute;
unsafe {
let family_attr: CFString = TCFType::wrap_under_get_rule(kCTFontFamilyNameAttribute);
let family_name: CFString = family.parse().unwrap();
let specified_attrs = CFDictionary::from_CFType_pairs(&[
(family_attr.as_CFType(), family_name.as_CFType())
]);
let wildcard_desc: CTFontDescriptor =
font_descriptor::new_from_attributes(&specified_attrs);
let mandatory_attrs = CFSet::from_slice(&[ family_attr.as_CFType() ]);
let matched_descs = CTFontDescriptorCreateMatchingFontDescriptors(
wildcard_desc.as_concrete_TypeRef(),
mandatory_attrs.as_concrete_TypeRef());
if matched_descs == ptr::null() {
return None;
}
let matched_descs: CFArray = TCFType::wrap_under_create_rule(matched_descs);
// I suppose one doesn't even need the CTFontCollection object at this point.
// But we stick descriptors into and out of it just to provide a nice wrapper API.
Some(new_from_descriptors(&matched_descs))
}
}
pub fn | () -> CFArray {
unsafe {
TCFType::wrap_under_create_rule(CTFontManagerCopyAvailableFontFamilyNames())
}
}
extern {
/*
* CTFontCollection.h
*/
static kCTFontCollectionRemoveDuplicatesOption: CFStringRef;
//fn CTFontCollectionCreateCopyWithFontDescriptors(original: CTFontCollectionRef,
// descriptors: CFArrayRef,
// options: CFDictionaryRef) -> CTFontCollectionRef;
fn CTFontCollectionCreateFromAvailableFonts(options: CFDictionaryRef) -> CTFontCollectionRef;
// this stupid function doesn't actually do any wildcard expansion;
// it just chooses the best match. Use
// CTFontDescriptorCreateMatchingDescriptors instead.
fn CTFontCollectionCreateMatchingFontDescriptors(collection: CTFontCollectionRef) -> CFArrayRef;
fn CTFontCollectionCreateWithFontDescriptors(descriptors: CFArrayRef,
options: CFDictionaryRef) -> CTFontCollectionRef;
//fn CTFontCollectionCreateMatchingFontDescriptorsSortedWithCallback;
fn CTFontCollectionGetTypeID() -> CFTypeID;
}
| get_family_names | identifier_name |
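For context, a caller of this module typically builds a collection for a single family and then inspects the matched descriptors. The sketch below assumes the module is exposed as `core_text::font_collection` (the exact crate path is an assumption) and only uses the functions defined above:

```rust
use core_text::font_collection;

fn list_helvetica_descriptors() {
    // create_for_family returns None when no descriptor matches the family name.
    if let Some(collection) = font_collection::create_for_family("Helvetica") {
        let descriptors = collection.get_descriptors();
        println!("matched {} font descriptors", descriptors.len());
    } else {
        println!("no font descriptors matched");
    }
}
```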
font_collection.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use font_descriptor;
use font_descriptor::{CTFontDescriptor, CTFontDescriptorCreateMatchingFontDescriptors};
use font_manager::CTFontManagerCopyAvailableFontFamilyNames;
use core_foundation::array::{CFArray, CFArrayRef};
use core_foundation::base::{CFRelease, CFRetain, CFTypeID, CFTypeRef, TCFType};
use core_foundation::dictionary::{CFDictionary, CFDictionaryRef};
use core_foundation::number::CFNumber;
use core_foundation::set::CFSet;
use core_foundation::string::{CFString, CFStringRef};
use libc::c_void;
use std::mem;
use std::ptr;
#[repr(C)]
pub struct __CTFontCollection(c_void);
pub type CTFontCollectionRef = *const __CTFontCollection;
#[derive(Debug)]
pub struct CTFontCollection {
obj: CTFontCollectionRef,
}
impl Drop for CTFontCollection {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CTFontCollectionRef> for CTFontCollection {
#[inline]
fn as_concrete_TypeRef(&self) -> CTFontCollectionRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CTFontCollectionRef) -> CTFontCollection {
let reference: CTFontCollectionRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
unsafe fn wrap_under_create_rule(obj: CTFontCollectionRef) -> CTFontCollection {
CTFontCollection {
obj: obj,
}
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
#[inline]
fn type_id() -> CFTypeID {
unsafe {
CTFontCollectionGetTypeID()
}
}
}
impl CTFontCollection {
pub fn get_descriptors(&self) -> CFArray {
// surprise! this function follows the Get rule, despite being named *Create*.
// So we have to addRef it to avoid CTFontCollection from double freeing it later.
unsafe {
TCFType::wrap_under_get_rule(CTFontCollectionCreateMatchingFontDescriptors(self.obj))
}
}
}
pub fn new_from_descriptors(descs: &CFArray) -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateWithFontDescriptors(descs.as_concrete_TypeRef(),
options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_all_families() -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateFromAvailableFonts(options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_family(family: &str) -> Option<CTFontCollection> {
use font_descriptor::kCTFontFamilyNameAttribute;
unsafe {
let family_attr: CFString = TCFType::wrap_under_get_rule(kCTFontFamilyNameAttribute);
let family_name: CFString = family.parse().unwrap();
let specified_attrs = CFDictionary::from_CFType_pairs(&[
(family_attr.as_CFType(), family_name.as_CFType())
]);
let wildcard_desc: CTFontDescriptor =
font_descriptor::new_from_attributes(&specified_attrs);
let mandatory_attrs = CFSet::from_slice(&[ family_attr.as_CFType() ]);
let matched_descs = CTFontDescriptorCreateMatchingFontDescriptors(
wildcard_desc.as_concrete_TypeRef(),
mandatory_attrs.as_concrete_TypeRef());
if matched_descs == ptr::null() {
return None;
}
let matched_descs: CFArray = TCFType::wrap_under_create_rule(matched_descs);
// I suppose one doesn't even need the CTFontCollection object at this point.
// But we stick descriptors into and out of it just to provide a nice wrapper API.
Some(new_from_descriptors(&matched_descs))
}
}
pub fn get_family_names() -> CFArray {
unsafe {
TCFType::wrap_under_create_rule(CTFontManagerCopyAvailableFontFamilyNames())
}
}
extern {
/*
* CTFontCollection.h
*/
static kCTFontCollectionRemoveDuplicatesOption: CFStringRef;
//fn CTFontCollectionCreateCopyWithFontDescriptors(original: CTFontCollectionRef,
// descriptors: CFArrayRef,
// options: CFDictionaryRef) -> CTFontCollectionRef; | // this stupid function doesn't actually do any wildcard expansion;
// it just chooses the best match. Use
// CTFontDescriptorCreateMatchingDescriptors instead.
fn CTFontCollectionCreateMatchingFontDescriptors(collection: CTFontCollectionRef) -> CFArrayRef;
fn CTFontCollectionCreateWithFontDescriptors(descriptors: CFArrayRef,
options: CFDictionaryRef) -> CTFontCollectionRef;
//fn CTFontCollectionCreateMatchingFontDescriptorsSortedWithCallback;
fn CTFontCollectionGetTypeID() -> CFTypeID;
} | fn CTFontCollectionCreateFromAvailableFonts(options: CFDictionaryRef) -> CTFontCollectionRef; | random_line_split |
font_collection.rs | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use font_descriptor;
use font_descriptor::{CTFontDescriptor, CTFontDescriptorCreateMatchingFontDescriptors};
use font_manager::CTFontManagerCopyAvailableFontFamilyNames;
use core_foundation::array::{CFArray, CFArrayRef};
use core_foundation::base::{CFRelease, CFRetain, CFTypeID, CFTypeRef, TCFType};
use core_foundation::dictionary::{CFDictionary, CFDictionaryRef};
use core_foundation::number::CFNumber;
use core_foundation::set::CFSet;
use core_foundation::string::{CFString, CFStringRef};
use libc::c_void;
use std::mem;
use std::ptr;
#[repr(C)]
pub struct __CTFontCollection(c_void);
pub type CTFontCollectionRef = *const __CTFontCollection;
#[derive(Debug)]
pub struct CTFontCollection {
obj: CTFontCollectionRef,
}
impl Drop for CTFontCollection {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CTFontCollectionRef> for CTFontCollection {
#[inline]
fn as_concrete_TypeRef(&self) -> CTFontCollectionRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CTFontCollectionRef) -> CTFontCollection {
let reference: CTFontCollectionRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
unsafe fn wrap_under_create_rule(obj: CTFontCollectionRef) -> CTFontCollection {
CTFontCollection {
obj: obj,
}
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
#[inline]
fn type_id() -> CFTypeID {
unsafe {
CTFontCollectionGetTypeID()
}
}
}
impl CTFontCollection {
pub fn get_descriptors(&self) -> CFArray {
// surprise! this function follows the Get rule, despite being named *Create*.
// So we have to addRef it to avoid CTFontCollection from double freeing it later.
unsafe {
TCFType::wrap_under_get_rule(CTFontCollectionCreateMatchingFontDescriptors(self.obj))
}
}
}
pub fn new_from_descriptors(descs: &CFArray) -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateWithFontDescriptors(descs.as_concrete_TypeRef(),
options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_all_families() -> CTFontCollection {
unsafe {
let key: CFString = TCFType::wrap_under_get_rule(kCTFontCollectionRemoveDuplicatesOption);
let value = CFNumber::from_i64(1);
let options = CFDictionary::from_CFType_pairs(&[ (key.as_CFType(), value.as_CFType()) ]);
let font_collection_ref =
CTFontCollectionCreateFromAvailableFonts(options.as_concrete_TypeRef());
TCFType::wrap_under_create_rule(font_collection_ref)
}
}
pub fn create_for_family(family: &str) -> Option<CTFontCollection> {
use font_descriptor::kCTFontFamilyNameAttribute;
unsafe {
let family_attr: CFString = TCFType::wrap_under_get_rule(kCTFontFamilyNameAttribute);
let family_name: CFString = family.parse().unwrap();
let specified_attrs = CFDictionary::from_CFType_pairs(&[
(family_attr.as_CFType(), family_name.as_CFType())
]);
let wildcard_desc: CTFontDescriptor =
font_descriptor::new_from_attributes(&specified_attrs);
let mandatory_attrs = CFSet::from_slice(&[ family_attr.as_CFType() ]);
let matched_descs = CTFontDescriptorCreateMatchingFontDescriptors(
wildcard_desc.as_concrete_TypeRef(),
mandatory_attrs.as_concrete_TypeRef());
if matched_descs == ptr::null() |
let matched_descs: CFArray = TCFType::wrap_under_create_rule(matched_descs);
// I suppose one doesn't even need the CTFontCollection object at this point.
// But we stick descriptors into and out of it just to provide a nice wrapper API.
Some(new_from_descriptors(&matched_descs))
}
}
pub fn get_family_names() -> CFArray {
unsafe {
TCFType::wrap_under_create_rule(CTFontManagerCopyAvailableFontFamilyNames())
}
}
extern {
/*
* CTFontCollection.h
*/
static kCTFontCollectionRemoveDuplicatesOption: CFStringRef;
//fn CTFontCollectionCreateCopyWithFontDescriptors(original: CTFontCollectionRef,
// descriptors: CFArrayRef,
// options: CFDictionaryRef) -> CTFontCollectionRef;
fn CTFontCollectionCreateFromAvailableFonts(options: CFDictionaryRef) -> CTFontCollectionRef;
// this stupid function doesn't actually do any wildcard expansion;
// it just chooses the best match. Use
// CTFontDescriptorCreateMatchingDescriptors instead.
fn CTFontCollectionCreateMatchingFontDescriptors(collection: CTFontCollectionRef) -> CFArrayRef;
fn CTFontCollectionCreateWithFontDescriptors(descriptors: CFArrayRef,
options: CFDictionaryRef) -> CTFontCollectionRef;
//fn CTFontCollectionCreateMatchingFontDescriptorsSortedWithCallback;
fn CTFontCollectionGetTypeID() -> CFTypeID;
}
| {
return None;
} | conditional_block |
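All three rows above sample the same CoreText binding file, so as a closing note, here is a minimal sketch of how the collection API they define might be driven from client code. It only calls functions visible in the rows (get_family_names, create_for_family, get_descriptors); the crate name, the core_text::font_collection module path, and the CFArray::len helper are assumptions on my part, not verified against any published release of these bindings.

// Hypothetical caller of the font_collection API shown in the rows above.
// The module path and CFArray::len() are assumed; adjust to the crate version in use.
extern crate core_text;

use core_text::font_collection;

fn main() {
    // Enumerate every installed font family name
    // (wraps CTFontManagerCopyAvailableFontFamilyNames under the Create rule).
    let families = font_collection::get_family_names();
    println!("{} font families installed", families.len());

    // Build a collection restricted to a single family and pull its descriptors;
    // create_for_family returns None when no descriptor matches the name.
    if let Some(collection) = font_collection::create_for_family("Helvetica") {
        let descriptors = collection.get_descriptors();
        println!("Helvetica matched {} face descriptors", descriptors.len());
    }
}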