file_name (large_string, 4–69 chars) | prefix (large_string, 0–26.7k chars) | suffix (large_string, 0–24.8k chars) | middle (large_string, 0–2.12k chars) | fim_type (large_string, 4 classes)
---|---|---|---|---|
build_gecko.rs
|
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
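// Record a header and recurse into every header it #includes, so all transitive
// dependencies matched by INCLUDE_RE are tracked (search_include also folds their
// modification times into LAST_MODIFIED).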
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
|
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
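// Apply clang arguments from a TOML table: a plain "args" list is forwarded as-is,
// while "<target-key>=<value>" entries (e.g. "os=...") are applied only when they
// match TARGET_INFO, recursing into nested tables.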
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type] != target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// > .filter(|p| !p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if !matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if !structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
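// Generate bindings from `builder`, apply the regex `fixups` to the output, and
// write the result into OUTDIR_PATH. Regeneration is skipped when the existing
// output file is newer than everything recorded in LAST_MODIFIED.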
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line| !line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
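// Pairs a bindgen Builder with a TOML table and remembers which keys were
// consumed, so get_builder() can reject unknown configuration keys.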
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if !self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
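// Generate the structs bindings: emit only types and vars, driven by the
// [structs] section of the config, and write them with write_binding_file.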
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if !self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file {
file.lock().unwrap().flush().unwrap();
}
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
} else {
false
}
}
fn generate_bindings() {
|
trait BuilderExt {
fn get_initial_builder() -> Builder;
|
random_line_split
|
build_gecko.rs
|
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type] != target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// > .filter(|p| !p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if !matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if !structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line| !line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if !self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if !self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file
|
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
} else {
false
}
}
fn generate_bindings
|
{
file.lock().unwrap().flush().unwrap();
}
|
conditional_block
|
shell.rs
|
use std::fmt;
use std::io::prelude::*;
use termcolor::Color::{Cyan, Green, Red, Yellow};
use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
use crate::util::errors::CargoResult;
/// The requested verbosity of output.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Verbosity {
Verbose,
Normal,
Quiet,
}
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
/// color.
pub struct Shell {
/// the `Write`able object, either with or without color support (represented by different enum
/// variants)
err: ShellOut,
/// How verbose messages should be
verbosity: Verbosity,
/// Flag that indicates the current line needs to be cleared before
/// printing. Used when a progress bar is currently displayed.
needs_clear: bool,
}
impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.err {
ShellOut::Write(_) => f
.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.finish(),
ShellOut::Stream { color_choice, .. } => f
.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
.finish(),
}
}
}
/// A `Write`able object, either with or without color support
enum ShellOut {
/// A plain write object without color support
Write(Box<dyn Write>),
/// Color-enabled stdio, with information on whether color should be used
Stream {
stream: StandardStream,
tty: bool,
color_choice: ColorChoice,
},
}
/// Whether messages should use color output
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ColorChoice {
/// Force color output
Always,
/// Force disable color output
Never,
/// Intelligently guess whether to use color output
CargoAuto,
}
impl Shell {
/// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
/// output.
pub fn new() -> Shell {
Shell {
err: ShellOut::Stream {
stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()),
color_choice: ColorChoice::CargoAuto,
tty: atty::is(atty::Stream::Stderr),
},
verbosity: Verbosity::Verbose,
needs_clear: false,
}
}
/// Creates a shell from a plain writable object, with no color, and max verbosity.
pub fn from_write(out: Box<dyn Write>) -> Shell {
Shell {
err: ShellOut::Write(out),
verbosity: Verbosity::Verbose,
needs_clear: false,
}
}
/// Prints a message, where the status will have `color` color, and can be justified. The
/// messages follows without color.
fn print(
&mut self,
status: &dyn fmt::Display,
message: Option<&dyn fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
_ => {
if self.needs_clear {
self.err_erase_line();
}
self.err.print(status, message, color, justified)
}
}
}
pub fn stdout_println(&mut self, message: impl fmt::Display) {
if self.needs_clear {
self.err_erase_line();
}
println!("{}", message);
}
/// Sets whether the next print should clear the current line.
pub fn set_needs_clear(&mut self, needs_clear: bool) {
self.needs_clear = needs_clear;
}
/// Returns `true` if the `needs_clear` flag is unset.
pub fn is_cleared(&self) -> bool {
!self.needs_clear
}
/// Returns the width of the terminal in spaces, if any.
pub fn err_width(&self) -> Option<usize> {
match self.err {
ShellOut::Stream { tty: true, .. } => imp::stderr_width(),
_ => None,
}
}
/// Returns `true` if stderr is a tty.
pub fn is_err_tty(&self) -> bool {
match self.err {
ShellOut::Stream { tty, .. } => tty,
_ => false,
}
}
/// Gets a reference to the underlying writer.
pub fn err(&mut self) -> &mut dyn Write {
if self.needs_clear {
self.err_erase_line();
}
self.err.as_write()
}
/// Erase from cursor to end of line.
pub fn err_erase_line(&mut self) {
if let ShellOut::Stream { tty: true, .. } = self.err {
imp::err_erase_line(self);
self.needs_clear = false;
}
}
/// Shortcut to right-align and color green a status message.
pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), Green, true)
}
pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
where
T: fmt::Display,
{
self.print(&status, None, Cyan, true)
}
/// Shortcut to right-align a status message.
pub fn status_with_color<T, U>(
&mut self,
status: T,
message: U,
color: Color,
) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), color, true)
}
/// Runs the callback only if we are in verbose mode.
pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => callback(self),
_ => Ok(()),
}
}
/// Runs the callback if we are not in verbose mode.
pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => Ok(()),
_ => callback(self),
}
}
/// Prints a red 'error' message.
pub fn error<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
if self.needs_clear {
self.err_erase_line();
}
self.err.print(&"error", Some(&message), Red, false)
}
/// Prints an amber 'warning' message.
pub fn warn<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
_ => self.print(&"warning", Some(&message), Yellow, false),
}
}
/// Prints a cyan 'note' message.
pub fn note<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
self.print(&"note", Some(&message), Cyan, false)
}
/// Updates the verbosity of the shell.
pub fn set_verbosity(&mut self, verbosity: Verbosity) {
self.verbosity = verbosity;
}
/// Gets the verbosity of the shell.
pub fn verbosity(&self) -> Verbosity {
self.verbosity
}
/// Updates the color choice (always, never, or auto) from a string.
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
if let ShellOut::Stream {
ref mut stream,
ref mut color_choice,
..
} = self.err
{
let cfg = match color {
Some("always") => ColorChoice::Always,
Some("never") => ColorChoice::Never,
Some("auto") | None => ColorChoice::CargoAuto,
Some(arg) => anyhow::bail!(
"argument for --color must be auto, always, or \
never, but found `{}`",
arg
),
};
*color_choice = cfg;
*stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
}
Ok(())
}
/// Gets the current color choice.
///
/// If we are not using a color stream, this will always return `Never`, even if the color
/// choice has been set to something else.
pub fn color_choice(&self) -> ColorChoice {
match self.err {
ShellOut::Stream { color_choice, .. } => color_choice,
ShellOut::Write(_) => ColorChoice::Never,
}
}
/// Whether the shell supports color.
pub fn supports_color(&self) -> bool {
match &self.err {
ShellOut::Write(_) => false,
ShellOut::Stream { stream, .. } => stream.supports_color(),
}
}
/// Prints a message and translates ANSI escape code into console colors.
pub fn print_ansi(&mut self, message: &[u8]) -> CargoResult<()> {
if self.needs_clear {
self.err_erase_line();
}
#[cfg(windows)]
{
if let ShellOut::Stream { stream, .. } = &mut self.err {
::fwdansi::write_ansi(stream, message)?;
return Ok(());
}
}
self.err().write_all(message)?;
Ok(())
}
}
impl Default for Shell {
fn default() -> Self {
Self::new()
}
}
impl ShellOut {
/// Prints out a message with a status. The status comes first, and is bold plus the given
/// color. The status can be justified, in which case the max width that will right align is
/// 12 chars.
fn print(
&mut self,
status: &dyn fmt::Display,
message: Option<&dyn fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match *self {
ShellOut::Stream { ref mut stream, .. } => {
stream.reset()?;
stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
if justified {
write!(stream, "{:>12}", status)?;
} else {
write!(stream, "{}", status)?;
stream.set_color(ColorSpec::new().set_bold(true))?;
write!(stream, ":")?;
}
stream.reset()?;
match message {
Some(message) => writeln!(stream, " {}", message)?,
None => write!(stream, " ")?,
}
}
ShellOut::Write(ref mut w) => {
if justified {
write!(w, "{:>12}", status)?;
} else {
write!(w, "{}:", status)?;
}
match message {
Some(message) => writeln!(w, " {}", message)?,
None => write!(w, " ")?,
}
}
}
Ok(())
}
/// Gets this object as a `io::Write`.
fn
|
(&mut self) -> &mut dyn Write {
match *self {
ShellOut::Stream { ref mut stream, .. } => stream,
ShellOut::Write(ref mut w) => w,
}
}
}
impl ColorChoice {
/// Converts our color choice to termcolor's version.
fn to_termcolor_color_choice(self) -> termcolor::ColorChoice {
match self {
ColorChoice::Always => termcolor::ColorChoice::Always,
ColorChoice::Never => termcolor::ColorChoice::Never,
ColorChoice::CargoAuto => {
if atty::is(atty::Stream::Stderr) {
termcolor::ColorChoice::Auto
} else {
termcolor::ColorChoice::Never
}
}
}
}
}
#[cfg(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))]
mod imp {
use super::Shell;
use std::mem;
pub fn stderr_width() -> Option<usize> {
unsafe {
let mut winsize: libc::winsize = mem::zeroed();
// The .into() here is needed for FreeBSD which defines TIOCGWINSZ
// as c_uint but ioctl wants c_ulong.
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 {
return None;
}
if winsize.ws_col > 0 {
Some(winsize.ws_col as usize)
} else {
None
}
}
}
pub fn err_erase_line(shell: &mut Shell) {
// This is the "EL - Erase in Line" sequence. It clears from the cursor
// to the end of line.
// https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences
let _ = shell.err.as_write().write_all(b"\x1B[K");
}
}
#[cfg(all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
))]
mod imp {
pub(super) use super::default_err_erase_line as err_erase_line;
pub fn stderr_width() -> Option<usize> {
None
}
}
#[cfg(windows)]
mod imp {
use std::{cmp, mem, ptr};
use winapi::um::fileapi::*;
use winapi::um::handleapi::*;
use winapi::um::processenv::*;
use winapi::um::winbase::*;
use winapi::um::wincon::*;
use winapi::um::winnt::*;
pub(super) use super::default_err_erase_line as err_erase_line;
pub fn stderr_width() -> Option<usize> {
unsafe {
let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 {
return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize);
}
// On mintty/msys/cygwin based terminals, the above fails with
// INVALID_HANDLE_VALUE. Use an alternate method which works
// in that case as well.
let h = CreateFileA(
"CONOUT$\0".as_ptr() as *const CHAR,
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
ptr::null_mut(),
OPEN_EXISTING,
0,
ptr::null_mut(),
);
if h == INVALID_HANDLE_VALUE {
return None;
}
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
CloseHandle(h);
if rc != 0 {
let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
// Unfortunately cygwin/mintty does not set the size of the
// backing console to match the actual window size. This
// always reports a size of 80 or 120 (not sure what
// determines that). Use a conservative max of 60 which should
// work in most circumstances. ConEmu does some magic to
// resize the console correctly, but there's no reasonable way
// to detect which kind of terminal we are running in, or if
// GetConsoleScreenBufferInfo returns accurate information.
return Some(cmp::min(60, width));
}
None
}
}
}
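/// Fallback "erase line" used where no platform-specific implementation exists:
/// overwrite the current line with spaces, then return the cursor to column 0.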
#[cfg(any(
all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
),
windows,
))]
fn default_err_erase_line(shell: &mut Shell) {
if let Some(max_width) = imp::stderr_width() {
let blank = " ".repeat(max_width);
drop(write!(shell.err.as_write(), "{}\r", blank));
}
}
|
as_write
|
identifier_name
|
shell.rs
|
use std::fmt;
use std::io::prelude::*;
use termcolor::Color::{Cyan, Green, Red, Yellow};
use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
use crate::util::errors::CargoResult;
/// The requested verbosity of output.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Verbosity {
Verbose,
Normal,
Quiet,
}
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
/// color.
pub struct Shell {
/// the `Write`able object, either with or without color support (represented by different enum
/// variants)
err: ShellOut,
/// How verbose messages should be
verbosity: Verbosity,
/// Flag that indicates the current line needs to be cleared before
/// printing. Used when a progress bar is currently displayed.
needs_clear: bool,
}
impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.err {
ShellOut::Write(_) => f
.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.finish(),
ShellOut::Stream { color_choice, .. } => f
.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
.finish(),
}
}
}
/// A `Write`able object, either with or without color support
enum ShellOut {
/// A plain write object without color support
Write(Box<dyn Write>),
/// Color-enabled stdio, with information on whether color should be used
Stream {
stream: StandardStream,
tty: bool,
color_choice: ColorChoice,
},
}
/// Whether messages should use color output
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ColorChoice {
/// Force color output
Always,
/// Force disable color output
Never,
/// Intelligently guess whether to use color output
CargoAuto,
}
impl Shell {
/// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
/// output.
pub fn new() -> Shell {
Shell {
err: ShellOut::Stream {
stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()),
color_choice: ColorChoice::CargoAuto,
tty: atty::is(atty::Stream::Stderr),
},
verbosity: Verbosity::Verbose,
needs_clear: false,
}
}
/// Creates a shell from a plain writable object, with no color, and max verbosity.
pub fn from_write(out: Box<dyn Write>) -> Shell {
Shell {
err: ShellOut::Write(out),
verbosity: Verbosity::Verbose,
needs_clear: false,
}
}
/// Prints a message, where the status will have `color` color, and can be justified. The
/// messages follows without color.
fn print(
&mut self,
status: &dyn fmt::Display,
message: Option<&dyn fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
_ => {
if self.needs_clear {
self.err_erase_line();
}
self.err.print(status, message, color, justified)
}
}
}
pub fn stdout_println(&mut self, message: impl fmt::Display) {
if self.needs_clear {
self.err_erase_line();
}
println!("{}", message);
}
/// Sets whether the next print should clear the current line.
pub fn set_needs_clear(&mut self, needs_clear: bool) {
self.needs_clear = needs_clear;
}
/// Returns `true` if the `needs_clear` flag is unset.
pub fn is_cleared(&self) -> bool {
!self.needs_clear
}
/// Returns the width of the terminal in spaces, if any.
pub fn err_width(&self) -> Option<usize> {
match self.err {
ShellOut::Stream { tty: true, .. } => imp::stderr_width(),
_ => None,
}
}
/// Returns `true` if stderr is a tty.
pub fn is_err_tty(&self) -> bool {
match self.err {
ShellOut::Stream { tty, .. } => tty,
_ => false,
}
}
/// Gets a reference to the underlying writer.
pub fn err(&mut self) -> &mut dyn Write {
if self.needs_clear {
self.err_erase_line();
}
self.err.as_write()
}
/// Erase from cursor to end of line.
pub fn err_erase_line(&mut self) {
if let ShellOut::Stream { tty: true, .. } = self.err {
imp::err_erase_line(self);
self.needs_clear = false;
}
}
/// Shortcut to right-align and color green a status message.
pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), Green, true)
}
pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
where
T: fmt::Display,
{
self.print(&status, None, Cyan, true)
}
/// Shortcut to right-align a status message.
pub fn status_with_color<T, U>(
&mut self,
status: T,
message: U,
color: Color,
) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), color, true)
}
/// Runs the callback only if we are in verbose mode.
pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => callback(self),
_ => Ok(()),
}
}
/// Runs the callback if we are not in verbose mode.
pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => Ok(()),
_ => callback(self),
}
}
/// Prints a red 'error' message.
pub fn error<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
if self.needs_clear {
self.err_erase_line();
}
self.err.print(&"error", Some(&message), Red, false)
}
/// Prints an amber 'warning' message.
pub fn warn<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
_ => self.print(&"warning", Some(&message), Yellow, false),
}
}
/// Prints a cyan 'note' message.
pub fn note<T: fmt::Display>(&mut self, message: T) -> CargoResult<()> {
self.print(&"note", Some(&message), Cyan, false)
}
/// Updates the verbosity of the shell.
pub fn set_verbosity(&mut self, verbosity: Verbosity) {
self.verbosity = verbosity;
}
/// Gets the verbosity of the shell.
pub fn verbosity(&self) -> Verbosity {
self.verbosity
}
/// Updates the color choice (always, never, or auto) from a string.
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
if let ShellOut::Stream {
ref mut stream,
ref mut color_choice,
..
} = self.err
{
let cfg = match color {
Some("always") => ColorChoice::Always,
Some("never") => ColorChoice::Never,
Some("auto") | None => ColorChoice::CargoAuto,
Some(arg) => anyhow::bail!(
"argument for --color must be auto, always, or \
never, but found `{}`",
arg
),
};
*color_choice = cfg;
*stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
}
Ok(())
}
/// Gets the current color choice.
///
/// If we are not using a color stream, this will always return `Never`, even if the color
/// choice has been set to something else.
pub fn color_choice(&self) -> ColorChoice {
match self.err {
ShellOut::Stream { color_choice, .. } => color_choice,
ShellOut::Write(_) => ColorChoice::Never,
}
}
/// Whether the shell supports color.
pub fn supports_color(&self) -> bool {
match &self.err {
ShellOut::Write(_) => false,
ShellOut::Stream { stream, .. } => stream.supports_color(),
}
}
/// Prints a message and translates ANSI escape code into console colors.
pub fn print_ansi(&mut self, message: &[u8]) -> CargoResult<()> {
if self.needs_clear {
self.err_erase_line();
}
#[cfg(windows)]
{
if let ShellOut::Stream { stream, .. } = &mut self.err {
::fwdansi::write_ansi(stream, message)?;
return Ok(());
}
}
self.err().write_all(message)?;
Ok(())
}
}
impl Default for Shell {
fn default() -> Self {
Self::new()
}
}
impl ShellOut {
/// Prints out a message with a status. The status comes first, and is bold plus the given
/// color. The status can be justified, in which case the max width that will right align is
/// 12 chars.
fn print(
&mut self,
status: &dyn fmt::Display,
message: Option<&dyn fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match *self {
ShellOut::Stream { ref mut stream, .. } => {
stream.reset()?;
stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
if justified {
write!(stream, "{:>12}", status)?;
} else {
write!(stream, "{}", status)?;
stream.set_color(ColorSpec::new().set_bold(true))?;
write!(stream, ":")?;
}
stream.reset()?;
match message {
Some(message) => writeln!(stream, " {}", message)?,
None => write!(stream, " ")?,
}
}
ShellOut::Write(ref mut w) => {
if justified {
write!(w, "{:>12}", status)?;
} else {
write!(w, "{}:", status)?;
}
match message {
Some(message) => writeln!(w, " {}", message)?,
None => write!(w, " ")?,
}
}
}
Ok(())
}
/// Gets this object as a `io::Write`.
fn as_write(&mut self) -> &mut dyn Write {
match *self {
ShellOut::Stream { ref mut stream, .. } => stream,
ShellOut::Write(ref mut w) => w,
}
}
}
impl ColorChoice {
/// Converts our color choice to termcolor's version.
fn to_termcolor_color_choice(self) -> termcolor::ColorChoice {
match self {
ColorChoice::Always => termcolor::ColorChoice::Always,
ColorChoice::Never => termcolor::ColorChoice::Never,
ColorChoice::CargoAuto => {
if atty::is(atty::Stream::Stderr) {
termcolor::ColorChoice::Auto
} else {
termcolor::ColorChoice::Never
}
}
}
}
}
#[cfg(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))]
mod imp {
use super::Shell;
use std::mem;
pub fn stderr_width() -> Option<usize> {
unsafe {
let mut winsize: libc::winsize = mem::zeroed();
// The .into() here is needed for FreeBSD which defines TIOCGWINSZ
// as c_uint but ioctl wants c_ulong.
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 {
return None;
}
if winsize.ws_col > 0 {
Some(winsize.ws_col as usize)
} else {
None
}
}
}
pub fn err_erase_line(shell: &mut Shell) {
// This is the "EL - Erase in Line" sequence. It clears from the cursor
// to the end of line.
// https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences
let _ = shell.err.as_write().write_all(b"\x1B[K");
}
}
#[cfg(all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
))]
mod imp {
pub(super) use super::default_err_erase_line as err_erase_line;
pub fn stderr_width() -> Option<usize> {
None
}
}
#[cfg(windows)]
mod imp {
use std::{cmp, mem, ptr};
|
use winapi::um::winbase::*;
use winapi::um::wincon::*;
use winapi::um::winnt::*;
pub(super) use super::default_err_erase_line as err_erase_line;
pub fn stderr_width() -> Option<usize> {
unsafe {
let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 {
return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize);
}
// On mintty/msys/cygwin based terminals, the above fails with
// INVALID_HANDLE_VALUE. Use an alternate method which works
// in that case as well.
let h = CreateFileA(
"CONOUT$\0".as_ptr() as *const CHAR,
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
ptr::null_mut(),
OPEN_EXISTING,
0,
ptr::null_mut(),
);
if h == INVALID_HANDLE_VALUE {
return None;
}
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
CloseHandle(h);
if rc != 0 {
let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
// Unfortunately cygwin/mintty does not set the size of the
// backing console to match the actual window size. This
// always reports a size of 80 or 120 (not sure what
// determines that). Use a conservative max of 60 which should
// work in most circumstances. ConEmu does some magic to
// resize the console correctly, but there's no reasonable way
// to detect which kind of terminal we are running in, or if
// GetConsoleScreenBufferInfo returns accurate information.
return Some(cmp::min(60, width));
}
None
}
}
}
#[cfg(any(
all(
unix,
not(any(target_os = "linux", target_os = "macos", target_os = "freebsd"))
),
windows,
))]
fn default_err_erase_line(shell: &mut Shell) {
if let Some(max_width) = imp::stderr_width() {
let blank = " ".repeat(max_width);
drop(write!(shell.err.as_write(), "{}\r", blank));
}
}
|
use winapi::um::fileapi::*;
use winapi::um::handleapi::*;
use winapi::um::processenv::*;
|
random_line_split
|
static-method-on-struct-and-enum.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STRUCT
// gdb-command:print arg1
// gdb-check:$1 = 1
// gdb-command:print arg2
// gdb-check:$2 = 2
// gdb-command:continue
// ENUM
// gdb-command:print arg1
// gdb-check:$3 = -3
// gdb-command:print arg2
// gdb-check:$4 = 4.5
// gdb-command:print arg3
// gdb-check:$5 = 5
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STRUCT
// lldb-command:print arg1
// lldbg-check:[...]$0 = 1
// lldbr-check:(isize) arg1 = 1
// lldb-command:print arg2
// lldbg-check:[...]$1 = 2
// lldbr-check:(isize) arg2 = 2
// lldb-command:continue
// ENUM
// lldb-command:print arg1
// lldbg-check:[...]$2 = -3
// lldbr-check:(isize) arg1 = -3
// lldb-command:print arg2
// lldbg-check:[...]$3 = 4.5
// lldbr-check:(f64) arg2 = 4.5
// lldb-command:print arg3
// lldbg-check:[...]$4 = 5
// lldbr-check:(usize) arg3 = 5
// lldb-command:continue
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
struct Struct {
x: isize
}
impl Struct {
fn static_method(arg1: isize, arg2: isize) -> isize {
zzz(); // #break
arg1 + arg2
}
}
enum
|
{
Variant1 { x: isize },
Variant2,
Variant3(f64, isize, char),
}
impl Enum {
fn static_method(arg1: isize, arg2: f64, arg3: usize) -> isize {
zzz(); // #break
arg1
}
}
fn main() {
Struct::static_method(1, 2);
Enum::static_method(-3, 4.5, 5);
}
fn zzz() {()}
|
Enum
|
identifier_name
|
static-method-on-struct-and-enum.rs
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STRUCT
// gdb-command:print arg1
// gdb-check:$1 = 1
// gdb-command:print arg2
// gdb-check:$2 = 2
// gdb-command:continue
// ENUM
// gdb-command:print arg1
// gdb-check:$3 = -3
// gdb-command:print arg2
// gdb-check:$4 = 4.5
// gdb-command:print arg3
// gdb-check:$5 = 5
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STRUCT
// lldb-command:print arg1
// lldbg-check:[...]$0 = 1
// lldbr-check:(isize) arg1 = 1
// lldb-command:print arg2
// lldbg-check:[...]$1 = 2
// lldbr-check:(isize) arg2 = 2
// lldb-command:continue
// ENUM
// lldb-command:print arg1
// lldbg-check:[...]$2 = -3
// lldbr-check:(isize) arg1 = -3
// lldb-command:print arg2
// lldbg-check:[...]$3 = 4.5
// lldbr-check:(f64) arg2 = 4.5
// lldb-command:print arg3
// lldbg-check:[...]$4 = 5
// lldbr-check:(usize) arg3 = 5
// lldb-command:continue
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
struct Struct {
x: isize
}
impl Struct {
fn static_method(arg1: isize, arg2: isize) -> isize {
zzz(); // #break
arg1 + arg2
}
}
enum Enum {
Variant1 { x: isize },
Variant2,
Variant3(f64, isize, char),
}
impl Enum {
fn static_method(arg1: isize, arg2: f64, arg3: usize) -> isize {
zzz(); // #break
arg1
}
}
fn main() {
Struct::static_method(1, 2);
Enum::static_method(-3, 4.5, 5);
}
fn zzz() {()}
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
random_line_split
|
|
record.rs
|
use std::ops::Deref;
use config::gobjects::GObject;
use env::Env;
use library;
use nameutil::*;
use super::*;
use super::imports::Imports;
use super::info_base::InfoBase;
use traits::*;
#[derive(Default)]
pub struct Info {
pub base: InfoBase,
}
impl Deref for Info {
type Target = InfoBase;
fn deref(&self) -> &InfoBase {
&self.base
}
}
impl Info {
//TODO: add test in tests/ for panic
pub fn type_<'a>(&self, library: &'a library::Library) -> &'a library::Record {
let type_ = library.type_(self.type_id).maybe_ref()
.unwrap_or_else(|| panic!("{} is not a record.", self.full_name));
type_
}
}
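// Builds the analysis info for a record type: resolves the type, analyzes its
// functions and special functions, and collects imports and version data.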
pub fn new(env: &Env, obj: &GObject) -> Option<Info> {
let full_name = obj.name.clone();
let record_tid = match env.library.find_type(0, &full_name) {
Some(tid) => tid,
None => return None,
};
let type_ = env.type_(record_tid);
let name: String = split_namespace_name(&full_name).1.into();
let record: &library::Record = match type_.maybe_ref() {
Some(record) => record,
None => return None,
};
let mut imports = Imports::new();
imports.add("glib::translate::*", None);
imports.add("ffi", None);
let mut functions = functions::analyze(env, &record.functions, record_tid, &obj, &mut imports);
let specials = special_functions::extract(&mut functions);
let (version, deprecated_version) = info_base::versions(env, &obj, &functions, record.version,
record.deprecated_version);
let is_shared = specials.get(&special_functions::Type::Ref).is_some() &&
specials.get(&special_functions::Type::Unref).is_some();
if is_shared
|
;
special_functions::analyze_imports(&specials, &mut imports);
//don't `use` yourself
imports.remove(&name);
let base = InfoBase {
full_name: full_name,
type_id: record_tid,
name: name,
functions: functions,
specials: specials,
imports: imports,
version: version,
deprecated_version: deprecated_version,
cfg_condition: obj.cfg_condition.clone(),
};
let info = Info {
base: base,
};
Some(info)
}
|
{
// `copy` will duplicate a struct while `clone` just adds a reference
special_functions::unhide(&mut functions, &specials, special_functions::Type::Copy);
}
|
conditional_block
|
record.rs
|
use std::ops::Deref;
use config::gobjects::GObject;
use env::Env;
use library;
use nameutil::*;
use super::*;
use super::imports::Imports;
use super::info_base::InfoBase;
use traits::*;
#[derive(Default)]
pub struct Info {
pub base: InfoBase,
}
impl Deref for Info {
type Target = InfoBase;
fn deref(&self) -> &InfoBase {
&self.base
}
}
impl Info {
//TODO: add test in tests/ for panic
pub fn type_<'a>(&self, library: &'a library::Library) -> &'a library::Record
|
}
pub fn new(env: &Env, obj: &GObject) -> Option<Info> {
let full_name = obj.name.clone();
let record_tid = match env.library.find_type(0, &full_name) {
Some(tid) => tid,
None => return None,
};
let type_ = env.type_(record_tid);
let name: String = split_namespace_name(&full_name).1.into();
let record: &library::Record = match type_.maybe_ref() {
Some(record) => record,
None => return None,
};
let mut imports = Imports::new();
imports.add("glib::translate::*", None);
imports.add("ffi", None);
let mut functions = functions::analyze(env, &record.functions, record_tid, &obj, &mut imports);
let specials = special_functions::extract(&mut functions);
let (version, deprecated_version) = info_base::versions(env, &obj, &functions, record.version,
record.deprecated_version);
let is_shared = specials.get(&special_functions::Type::Ref).is_some() &&
specials.get(&special_functions::Type::Unref).is_some();
if is_shared {
// `copy` will duplicate a struct while `clone` just adds a reference
special_functions::unhide(&mut functions, &specials, special_functions::Type::Copy);
};
special_functions::analyze_imports(&specials, &mut imports);
//don't `use` yourself
imports.remove(&name);
let base = InfoBase {
full_name: full_name,
type_id: record_tid,
name: name,
functions: functions,
specials: specials,
imports: imports,
version: version,
deprecated_version: deprecated_version,
cfg_condition: obj.cfg_condition.clone(),
};
let info = Info {
base: base,
};
Some(info)
}
|
{
let type_ = library.type_(self.type_id).maybe_ref()
.unwrap_or_else(|| panic!("{} is not a record.", self.full_name));
type_
}
|
identifier_body
|
record.rs
|
use std::ops::Deref;
use config::gobjects::GObject;
use env::Env;
use library;
use nameutil::*;
use super::*;
use super::imports::Imports;
use super::info_base::InfoBase;
use traits::*;
#[derive(Default)]
pub struct Info {
pub base: InfoBase,
}
impl Deref for Info {
type Target = InfoBase;
fn deref(&self) -> &InfoBase {
&self.base
}
}
impl Info {
//TODO: add test in tests/ for panic
pub fn type_<'a>(&self, library: &'a library::Library) -> &'a library::Record {
let type_ = library.type_(self.type_id).maybe_ref()
.unwrap_or_else(|| panic!("{} is not a record.", self.full_name));
type_
}
}
pub fn
|
(env: &Env, obj: &GObject) -> Option<Info> {
let full_name = obj.name.clone();
let record_tid = match env.library.find_type(0, &full_name) {
Some(tid) => tid,
None => return None,
};
let type_ = env.type_(record_tid);
let name: String = split_namespace_name(&full_name).1.into();
let record: &library::Record = match type_.maybe_ref() {
Some(record) => record,
None => return None,
};
let mut imports = Imports::new();
imports.add("glib::translate::*", None);
imports.add("ffi", None);
let mut functions = functions::analyze(env, &record.functions, record_tid, &obj, &mut imports);
let specials = special_functions::extract(&mut functions);
let (version, deprecated_version) = info_base::versions(env, &obj, &functions, record.version,
record.deprecated_version);
let is_shared = specials.get(&special_functions::Type::Ref).is_some() &&
specials.get(&special_functions::Type::Unref).is_some();
if is_shared {
// `copy` will duplicate a struct while `clone` just adds a reference
special_functions::unhide(&mut functions, &specials, special_functions::Type::Copy);
};
special_functions::analyze_imports(&specials, &mut imports);
//don't `use` yourself
imports.remove(&name);
let base = InfoBase {
full_name: full_name,
type_id: record_tid,
name: name,
functions: functions,
specials: specials,
imports: imports,
version: version,
deprecated_version: deprecated_version,
cfg_condition: obj.cfg_condition.clone(),
};
let info = Info {
base: base,
};
Some(info)
}
|
new
|
identifier_name
|
record.rs
|
use std::ops::Deref;
use config::gobjects::GObject;
use env::Env;
use library;
use nameutil::*;
use super::*;
use super::imports::Imports;
use super::info_base::InfoBase;
use traits::*;
#[derive(Default)]
pub struct Info {
pub base: InfoBase,
}
impl Deref for Info {
type Target = InfoBase;
fn deref(&self) -> &InfoBase {
&self.base
}
}
impl Info {
//TODO: add test in tests/ for panic
pub fn type_<'a>(&self, library: &'a library::Library) -> &'a library::Record {
let type_ = library.type_(self.type_id).maybe_ref()
.unwrap_or_else(|| panic!("{} is not a record.", self.full_name));
type_
}
}
pub fn new(env: &Env, obj: &GObject) -> Option<Info> {
let full_name = obj.name.clone();
let record_tid = match env.library.find_type(0, &full_name) {
Some(tid) => tid,
None => return None,
};
|
let record: &library::Record = match type_.maybe_ref() {
Some(record) => record,
None => return None,
};
let mut imports = Imports::new();
imports.add("glib::translate::*", None);
imports.add("ffi", None);
let mut functions = functions::analyze(env, &record.functions, record_tid, &obj, &mut imports);
let specials = special_functions::extract(&mut functions);
let (version, deprecated_version) = info_base::versions(env, &obj, &functions, record.version,
record.deprecated_version);
let is_shared = specials.get(&special_functions::Type::Ref).is_some() &&
specials.get(&special_functions::Type::Unref).is_some();
if is_shared {
// `copy` will duplicate a struct while `clone` just adds a reference
special_functions::unhide(&mut functions, &specials, special_functions::Type::Copy);
};
special_functions::analyze_imports(&specials, &mut imports);
    // don't `use` yourself
imports.remove(&name);
let base = InfoBase {
full_name: full_name,
type_id: record_tid,
name: name,
functions: functions,
specials: specials,
imports: imports,
version: version,
deprecated_version: deprecated_version,
cfg_condition: obj.cfg_condition.clone(),
};
let info = Info {
base: base,
};
Some(info)
}
|
let type_ = env.type_(record_tid);
let name: String = split_namespace_name(&full_name).1.into();
|
random_line_split
|
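The record analysis above treats a type as shared only when the extracted special functions include both `ref` and `unref`; only then is the hidden `copy` re-exposed, since `clone` already covers adding a reference. A tiny self-contained sketch of that rule (the `Special` enum here is illustrative, not gir's real type):

use std::collections::HashSet;

#[derive(PartialEq, Eq, Hash)]
enum Special {
    Ref,
    Unref,
}

// A record counts as shared (reference counted) only when both a `ref`
// and an `unref` special function were detected during analysis.
fn is_shared(specials: &HashSet<Special>) -> bool {
    specials.contains(&Special::Ref) && specials.contains(&Special::Unref)
}

fn main() {
    let mut specials = HashSet::new();
    specials.insert(Special::Ref);
    assert!(!is_shared(&specials)); // `unref` missing, so not shared
    specials.insert(Special::Unref);
    assert!(is_shared(&specials));
}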
get_message_events.rs
|
//! [GET /_matrix/client/r0/rooms/{roomId}/messages](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-rooms-roomid-messages)
use js_int::UInt;
use ruma_api::ruma_api;
use ruma_events::{
collections::all::{RoomEvent, StateEvent},
EventJson,
};
use ruma_identifiers::RoomId;
use serde::{Deserialize, Serialize};
use crate::r0::filter::RoomEventFilter;
ruma_api! {
metadata {
description: "Get message events for a room.",
method: GET,
name: "get_message_events",
path: "/_matrix/client/r0/rooms/:room_id/messages",
rate_limited: false,
requires_authentication: true,
}
request {
/// The room to get events from.
#[ruma_api(path)]
pub room_id: RoomId,
/// The token to start returning events from.
///
/// This token can be obtained from a
/// prev_batch token returned for each room by the sync API, or from a start or end token
/// returned by a previous request to this endpoint.
#[ruma_api(query)]
pub from: String,
/// The token to stop returning events at.
///
/// This token can be obtained from a prev_batch
/// token returned for each room by the sync endpoint, or from a start or end token returned
/// by a previous request to this endpoint.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub to: Option<String>,
/// The direction to return events from.
#[ruma_api(query)]
pub dir: Direction,
/// The maximum number of events to return.
///
/// Default: 10.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub limit: Option<UInt>,
/// A RoomEventFilter to filter returned events with.
#[ruma_api(query)]
#[serde(
with = "ruma_serde::json_string",
default,
skip_serializing_if = "Option::is_none"
)]
pub filter: Option<RoomEventFilter>,
}
response {
/// The token the pagination starts from.
#[serde(skip_serializing_if = "Option::is_none")]
pub start: Option<String>,
/// The token the pagination ends at.
#[serde(skip_serializing_if = "Option::is_none")]
pub end: Option<String>,
/// A list of room events.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub chunk: Vec<EventJson<RoomEvent>>,
/// A list of state events relevant to showing the `chunk`.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub state: Vec<EventJson<StateEvent>>,
}
error: crate::Error
}
/// The direction to return events from.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub enum Direction {
/// Return events backwards in time from the requested `from` token.
#[serde(rename = "b")]
Backward,
/// Return events forwards in time from the requested `from` token.
#[serde(rename = "f")]
Forward,
}
#[cfg(test)]
mod tests {
use super::{Direction, Request};
use std::convert::{TryFrom, TryInto};
use js_int::UInt;
use ruma_identifiers::RoomId;
use crate::r0::filter::{LazyLoadOptions, RoomEventFilter};
#[test]
fn test_serialize_some_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let filter = RoomEventFilter {
lazy_load_options: LazyLoadOptions::Enabled {
include_redundant_members: true,
},
rooms: Some(vec![room_id.clone()]),
not_rooms: vec!["room".into(), "room2".into(), "room3".into()],
not_types: vec!["type".into()],
..Default::default()
};
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(filter),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%22not_types%22%3A%5B%22type%22%5D%2C%22not_rooms%22%3A%5B%22room%22%2C%22room2%22%2C%22room3%22%5D%2C%22rooms%22%3A%5B%22%21roomid%3Aexample.org%22%5D%2C%22lazy_load_members%22%3Atrue%2C%22include_redundant_members%22%3Atrue%7D",
request.uri().query().unwrap()
);
}
#[test]
fn test_serialize_none_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: None,
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0",
request.uri().query().unwrap(),
);
}
#[test]
fn
|
() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(RoomEventFilter::default()),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%7D",
request.uri().query().unwrap(),
);
}
}
|
test_serialize_default_room_event_filter
|
identifier_name
|
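The `Direction` enum above leans on serde's rename attributes so the wire values are the single letters the Matrix spec uses for `dir` (`b`/`f`). A minimal sketch of just that serialization behaviour using serde and serde_json directly (standalone, not ruma code):

use serde::Serialize;

/// Same shape as the request's `Direction`: each unit variant is renamed to
/// the single-letter value expected in the `dir` query parameter.
#[derive(Serialize)]
enum Direction {
    #[serde(rename = "b")]
    Backward,
    #[serde(rename = "f")]
    Forward,
}

fn main() {
    // Unit variants serialize as plain strings, which is why the query layer
    // can emit `dir=b` or `dir=f` directly.
    assert_eq!(serde_json::to_string(&Direction::Backward).unwrap(), "\"b\"");
    assert_eq!(serde_json::to_string(&Direction::Forward).unwrap(), "\"f\"");
}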
get_message_events.rs
|
//! [GET /_matrix/client/r0/rooms/{roomId}/messages](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-rooms-roomid-messages)
use js_int::UInt;
use ruma_api::ruma_api;
use ruma_events::{
collections::all::{RoomEvent, StateEvent},
EventJson,
};
use ruma_identifiers::RoomId;
use serde::{Deserialize, Serialize};
use crate::r0::filter::RoomEventFilter;
ruma_api! {
metadata {
description: "Get message events for a room.",
method: GET,
name: "get_message_events",
path: "/_matrix/client/r0/rooms/:room_id/messages",
rate_limited: false,
requires_authentication: true,
}
request {
/// The room to get events from.
#[ruma_api(path)]
pub room_id: RoomId,
/// The token to start returning events from.
///
/// This token can be obtained from a
/// prev_batch token returned for each room by the sync API, or from a start or end token
/// returned by a previous request to this endpoint.
#[ruma_api(query)]
pub from: String,
/// The token to stop returning events at.
///
/// This token can be obtained from a prev_batch
/// token returned for each room by the sync endpoint, or from a start or end token returned
/// by a previous request to this endpoint.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub to: Option<String>,
/// The direction to return events from.
#[ruma_api(query)]
pub dir: Direction,
/// The maximum number of events to return.
///
/// Default: 10.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub limit: Option<UInt>,
/// A RoomEventFilter to filter returned events with.
#[ruma_api(query)]
#[serde(
with = "ruma_serde::json_string",
default,
skip_serializing_if = "Option::is_none"
)]
pub filter: Option<RoomEventFilter>,
}
response {
/// The token the pagination starts from.
#[serde(skip_serializing_if = "Option::is_none")]
pub start: Option<String>,
/// The token the pagination ends at.
#[serde(skip_serializing_if = "Option::is_none")]
pub end: Option<String>,
/// A list of room events.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub chunk: Vec<EventJson<RoomEvent>>,
/// A list of state events relevant to showing the `chunk`.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub state: Vec<EventJson<StateEvent>>,
}
error: crate::Error
}
/// The direction to return events from.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub enum Direction {
/// Return events backwards in time from the requested `from` token.
#[serde(rename = "b")]
Backward,
/// Return events forwards in time from the requested `from` token.
#[serde(rename = "f")]
Forward,
}
#[cfg(test)]
mod tests {
use super::{Direction, Request};
use std::convert::{TryFrom, TryInto};
use js_int::UInt;
use ruma_identifiers::RoomId;
use crate::r0::filter::{LazyLoadOptions, RoomEventFilter};
#[test]
fn test_serialize_some_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let filter = RoomEventFilter {
lazy_load_options: LazyLoadOptions::Enabled {
include_redundant_members: true,
},
rooms: Some(vec![room_id.clone()]),
not_rooms: vec!["room".into(), "room2".into(), "room3".into()],
not_types: vec!["type".into()],
..Default::default()
};
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(filter),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%22not_types%22%3A%5B%22type%22%5D%2C%22not_rooms%22%3A%5B%22room%22%2C%22room2%22%2C%22room3%22%5D%2C%22rooms%22%3A%5B%22%21roomid%3Aexample.org%22%5D%2C%22lazy_load_members%22%3Atrue%2C%22include_redundant_members%22%3Atrue%7D",
request.uri().query().unwrap()
);
}
#[test]
fn test_serialize_none_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: None,
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0",
request.uri().query().unwrap(),
);
}
#[test]
fn test_serialize_default_room_event_filter()
|
}
|
{
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(RoomEventFilter::default()),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%7D",
request.uri().query().unwrap(),
);
}
|
identifier_body
|
get_message_events.rs
|
//! [GET /_matrix/client/r0/rooms/{roomId}/messages](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-rooms-roomid-messages)
use js_int::UInt;
use ruma_api::ruma_api;
use ruma_events::{
collections::all::{RoomEvent, StateEvent},
EventJson,
};
use ruma_identifiers::RoomId;
use serde::{Deserialize, Serialize};
use crate::r0::filter::RoomEventFilter;
ruma_api! {
metadata {
description: "Get message events for a room.",
method: GET,
name: "get_message_events",
path: "/_matrix/client/r0/rooms/:room_id/messages",
rate_limited: false,
requires_authentication: true,
}
request {
/// The room to get events from.
#[ruma_api(path)]
pub room_id: RoomId,
|
/// returned by a previous request to this endpoint.
#[ruma_api(query)]
pub from: String,
/// The token to stop returning events at.
///
/// This token can be obtained from a prev_batch
/// token returned for each room by the sync endpoint, or from a start or end token returned
/// by a previous request to this endpoint.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub to: Option<String>,
/// The direction to return events from.
#[ruma_api(query)]
pub dir: Direction,
/// The maximum number of events to return.
///
/// Default: 10.
#[serde(skip_serializing_if = "Option::is_none")]
#[ruma_api(query)]
pub limit: Option<UInt>,
/// A RoomEventFilter to filter returned events with.
#[ruma_api(query)]
#[serde(
with = "ruma_serde::json_string",
default,
skip_serializing_if = "Option::is_none"
)]
pub filter: Option<RoomEventFilter>,
}
response {
/// The token the pagination starts from.
#[serde(skip_serializing_if = "Option::is_none")]
pub start: Option<String>,
/// The token the pagination ends at.
#[serde(skip_serializing_if = "Option::is_none")]
pub end: Option<String>,
/// A list of room events.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub chunk: Vec<EventJson<RoomEvent>>,
/// A list of state events relevant to showing the `chunk`.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub state: Vec<EventJson<StateEvent>>,
}
error: crate::Error
}
/// The direction to return events from.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub enum Direction {
/// Return events backwards in time from the requested `from` token.
#[serde(rename = "b")]
Backward,
/// Return events forwards in time from the requested `from` token.
#[serde(rename = "f")]
Forward,
}
#[cfg(test)]
mod tests {
use super::{Direction, Request};
use std::convert::{TryFrom, TryInto};
use js_int::UInt;
use ruma_identifiers::RoomId;
use crate::r0::filter::{LazyLoadOptions, RoomEventFilter};
#[test]
fn test_serialize_some_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let filter = RoomEventFilter {
lazy_load_options: LazyLoadOptions::Enabled {
include_redundant_members: true,
},
rooms: Some(vec![room_id.clone()]),
not_rooms: vec!["room".into(), "room2".into(), "room3".into()],
not_types: vec!["type".into()],
..Default::default()
};
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(filter),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%22not_types%22%3A%5B%22type%22%5D%2C%22not_rooms%22%3A%5B%22room%22%2C%22room2%22%2C%22room3%22%5D%2C%22rooms%22%3A%5B%22%21roomid%3Aexample.org%22%5D%2C%22lazy_load_members%22%3Atrue%2C%22include_redundant_members%22%3Atrue%7D",
request.uri().query().unwrap()
);
}
#[test]
fn test_serialize_none_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: None,
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0",
request.uri().query().unwrap(),
);
}
#[test]
fn test_serialize_default_room_event_filter() {
let room_id = RoomId::try_from("!roomid:example.org").unwrap();
let req = Request {
room_id,
from: "token".into(),
to: Some("token2".into()),
dir: Direction::Backward,
limit: Some(UInt::from(0u32)),
filter: Some(RoomEventFilter::default()),
};
let request: http::Request<Vec<u8>> = req.try_into().unwrap();
assert_eq!(
"from=token&to=token2&dir=b&limit=0&filter=%7B%7D",
request.uri().query().unwrap(),
);
}
}
|
/// The token to start returning events from.
///
/// This token can be obtained from a
/// prev_batch token returned for each room by the sync API, or from a start or end token
|
random_line_split
|
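In the tests above the `filter` query parameter goes through `ruma_serde::json_string`, so a default `RoomEventFilter` serializes to the JSON text `{}` and appears in the query string as `%7B%7D`. A dependency-free sketch of just that last encoding step (the helper below is made up for illustration and is not part of ruma):

// Percent-encode the JSON characters that are reserved in a URL query string.
fn percent_encode_json(json: &str) -> String {
    json.chars()
        .map(|c| match c {
            '{' => "%7B".to_string(),
            '}' => "%7D".to_string(),
            '"' => "%22".to_string(),
            ':' => "%3A".to_string(),
            ',' => "%2C".to_string(),
            '[' => "%5B".to_string(),
            ']' => "%5D".to_string(),
            '!' => "%21".to_string(),
            other => other.to_string(),
        })
        .collect()
}

fn main() {
    // An empty filter serializes to "{}", which is exactly the "%7B%7D"
    // asserted in test_serialize_default_room_event_filter above.
    assert_eq!(percent_encode_json("{}"), "%7B%7D");
}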
layout_debug.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Supports writing a trace file created during each layout scope
//! that can be viewed by an external tool to make layout debugging easier.
#![macro_use]
use flow_ref::FlowRef;
use flow;
use rustc_serialize::json;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::old_io::File;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
thread_local!(static STATE_KEY: RefCell<Option<State>> = RefCell::new(None));
static mut DEBUG_ID_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
pub struct
|
;
#[macro_export]
macro_rules! layout_debug_scope(
($($arg:tt)*) => (
if cfg!(not(ndebug)) {
layout_debug::Scope::new(format!($($arg)*))
} else {
layout_debug::Scope
}
)
);
#[derive(RustcEncodable)]
struct ScopeData {
name: String,
pre: String,
post: String,
children: Vec<Box<ScopeData>>,
}
impl ScopeData {
fn new(name: String, pre: String) -> ScopeData {
ScopeData {
name: name,
pre: pre,
post: String::new(),
children: vec!(),
}
}
}
struct State {
flow_root: FlowRef,
scope_stack: Vec<Box<ScopeData>>,
}
/// A layout debugging scope. The entire state of the flow tree
/// will be output at the beginning and end of this scope.
impl Scope {
pub fn new(name: String) -> Scope {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let flow_trace = json::encode(&flow::base(&*state.flow_root)).unwrap();
let data = box ScopeData::new(name.clone(), flow_trace);
state.scope_stack.push(data);
}
&mut None => {}
}
});
Scope
}
}
#[cfg(not(ndebug))]
impl Drop for Scope {
fn drop(&mut self) {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let mut current_scope = state.scope_stack.pop().unwrap();
current_scope.post = json::encode(&flow::base(&*state.flow_root)).unwrap();
let previous_scope = state.scope_stack.last_mut().unwrap();
previous_scope.children.push(current_scope);
}
&mut None => {}
}
});
}
}
/// Generate a unique ID. This is used for items such as Fragment
/// which are often reallocated but represent essentially the
/// same data.
#[allow(unsafe_blocks)]
pub fn generate_unique_debug_id() -> u16 {
unsafe { DEBUG_ID_COUNTER.fetch_add(1, Ordering::SeqCst) as u16 }
}
/// Begin a layout debug trace. If this has not been called,
/// creating debug scopes has no effect.
pub fn begin_trace(flow_root: FlowRef) {
assert!(STATE_KEY.with(|ref r| r.borrow().is_none()));
STATE_KEY.with(|ref r| {
let flow_trace = json::encode(&flow::base(&*flow_root)).unwrap();
let state = State {
scope_stack: vec![box ScopeData::new("root".to_owned(), flow_trace)],
flow_root: flow_root.clone(),
};
*r.borrow_mut() = Some(state);
});
}
/// End the debug layout trace. This will write the layout
/// trace to disk in the current directory. The output
/// file can then be viewed with an external tool.
pub fn end_trace() {
let mut task_state = STATE_KEY.with(|ref r| r.borrow_mut().take().unwrap());
assert!(task_state.scope_stack.len() == 1);
let mut root_scope = task_state.scope_stack.pop().unwrap();
root_scope.post = json::encode(&flow::base(&*task_state.flow_root)).unwrap();
let result = json::encode(&root_scope).unwrap();
let path = Path::new("layout_trace.json");
let mut file = File::create(&path).unwrap();
file.write_str(result.as_slice()).unwrap();
}
|
Scope
|
identifier_name
|
layout_debug.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Supports writing a trace file created during each layout scope
//! that can be viewed by an external tool to make layout debugging easier.
#![macro_use]
use flow_ref::FlowRef;
use flow;
use rustc_serialize::json;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::old_io::File;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
thread_local!(static STATE_KEY: RefCell<Option<State>> = RefCell::new(None));
static mut DEBUG_ID_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
pub struct Scope;
#[macro_export]
macro_rules! layout_debug_scope(
($($arg:tt)*) => (
if cfg!(not(ndebug)) {
layout_debug::Scope::new(format!($($arg)*))
} else {
layout_debug::Scope
}
)
);
#[derive(RustcEncodable)]
struct ScopeData {
name: String,
pre: String,
post: String,
children: Vec<Box<ScopeData>>,
}
impl ScopeData {
fn new(name: String, pre: String) -> ScopeData {
ScopeData {
name: name,
pre: pre,
post: String::new(),
children: vec!(),
}
}
}
struct State {
flow_root: FlowRef,
scope_stack: Vec<Box<ScopeData>>,
}
/// A layout debugging scope. The entire state of the flow tree
/// will be output at the beginning and end of this scope.
impl Scope {
pub fn new(name: String) -> Scope {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let flow_trace = json::encode(&flow::base(&*state.flow_root)).unwrap();
let data = box ScopeData::new(name.clone(), flow_trace);
state.scope_stack.push(data);
}
&mut None => {}
}
});
Scope
}
}
#[cfg(not(ndebug))]
impl Drop for Scope {
fn drop(&mut self) {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let mut current_scope = state.scope_stack.pop().unwrap();
current_scope.post = json::encode(&flow::base(&*state.flow_root)).unwrap();
let previous_scope = state.scope_stack.last_mut().unwrap();
previous_scope.children.push(current_scope);
}
&mut None => {}
}
});
}
}
/// Generate a unique ID. This is used for items such as Fragment
/// which are often reallocated but represent essentially the
/// same data.
#[allow(unsafe_blocks)]
pub fn generate_unique_debug_id() -> u16 {
unsafe { DEBUG_ID_COUNTER.fetch_add(1, Ordering::SeqCst) as u16 }
}
/// Begin a layout debug trace. If this has not been called,
/// creating debug scopes has no effect.
pub fn begin_trace(flow_root: FlowRef) {
assert!(STATE_KEY.with(|ref r| r.borrow().is_none()));
|
};
*r.borrow_mut() = Some(state);
});
}
/// End the debug layout trace. This will write the layout
/// trace to disk in the current directory. The output
/// file can then be viewed with an external tool.
pub fn end_trace() {
let mut task_state = STATE_KEY.with(|ref r| r.borrow_mut().take().unwrap());
assert!(task_state.scope_stack.len() == 1);
let mut root_scope = task_state.scope_stack.pop().unwrap();
root_scope.post = json::encode(&flow::base(&*task_state.flow_root)).unwrap();
let result = json::encode(&root_scope).unwrap();
let path = Path::new("layout_trace.json");
let mut file = File::create(&path).unwrap();
file.write_str(result.as_slice()).unwrap();
}
|
STATE_KEY.with(|ref r| {
let flow_trace = json::encode(&flow::base(&*flow_root)).unwrap();
let state = State {
scope_stack: vec![box ScopeData::new("root".to_owned(), flow_trace)],
flow_root: flow_root.clone(),
|
random_line_split
|
layout_debug.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Supports writing a trace file created during each layout scope
//! that can be viewed by an external tool to make layout debugging easier.
#![macro_use]
use flow_ref::FlowRef;
use flow;
use rustc_serialize::json;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::old_io::File;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
thread_local!(static STATE_KEY: RefCell<Option<State>> = RefCell::new(None));
static mut DEBUG_ID_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
pub struct Scope;
#[macro_export]
macro_rules! layout_debug_scope(
($($arg:tt)*) => (
if cfg!(not(ndebug)) {
layout_debug::Scope::new(format!($($arg)*))
} else {
layout_debug::Scope
}
)
);
#[derive(RustcEncodable)]
struct ScopeData {
name: String,
pre: String,
post: String,
children: Vec<Box<ScopeData>>,
}
impl ScopeData {
fn new(name: String, pre: String) -> ScopeData {
ScopeData {
name: name,
pre: pre,
post: String::new(),
children: vec!(),
}
}
}
struct State {
flow_root: FlowRef,
scope_stack: Vec<Box<ScopeData>>,
}
/// A layout debugging scope. The entire state of the flow tree
/// will be output at the beginning and end of this scope.
impl Scope {
pub fn new(name: String) -> Scope {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let flow_trace = json::encode(&flow::base(&*state.flow_root)).unwrap();
let data = box ScopeData::new(name.clone(), flow_trace);
state.scope_stack.push(data);
}
&mut None => {}
}
});
Scope
}
}
#[cfg(not(ndebug))]
impl Drop for Scope {
fn drop(&mut self) {
STATE_KEY.with(|ref r| {
match &mut *r.borrow_mut() {
&mut Some(ref mut state) => {
let mut current_scope = state.scope_stack.pop().unwrap();
current_scope.post = json::encode(&flow::base(&*state.flow_root)).unwrap();
let previous_scope = state.scope_stack.last_mut().unwrap();
previous_scope.children.push(current_scope);
}
&mut None =>
|
}
});
}
}
/// Generate a unique ID. This is used for items such as Fragment
/// which are often reallocated but represent essentially the
/// same data.
#[allow(unsafe_blocks)]
pub fn generate_unique_debug_id() -> u16 {
unsafe { DEBUG_ID_COUNTER.fetch_add(1, Ordering::SeqCst) as u16 }
}
/// Begin a layout debug trace. If this has not been called,
/// creating debug scopes has no effect.
pub fn begin_trace(flow_root: FlowRef) {
assert!(STATE_KEY.with(|ref r| r.borrow().is_none()));
STATE_KEY.with(|ref r| {
let flow_trace = json::encode(&flow::base(&*flow_root)).unwrap();
let state = State {
scope_stack: vec![box ScopeData::new("root".to_owned(), flow_trace)],
flow_root: flow_root.clone(),
};
*r.borrow_mut() = Some(state);
});
}
/// End the debug layout trace. This will write the layout
/// trace to disk in the current directory. The output
/// file can then be viewed with an external tool.
pub fn end_trace() {
let mut task_state = STATE_KEY.with(|ref r| r.borrow_mut().take().unwrap());
assert!(task_state.scope_stack.len() == 1);
let mut root_scope = task_state.scope_stack.pop().unwrap();
root_scope.post = json::encode(&flow::base(&*task_state.flow_root)).unwrap();
let result = json::encode(&root_scope).unwrap();
let path = Path::new("layout_trace.json");
let mut file = File::create(&path).unwrap();
file.write_str(result.as_slice()).unwrap();
}
|
{}
|
conditional_block
|
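The tracing code above keeps a thread-local stack of scopes: `Scope::new` pushes a node, `Drop` pops it and attaches it to its parent, and the root node is what `end_trace` finally writes to disk. A minimal dependency-free sketch of the same pattern (names are illustrative; none of this is Servo code):

use std::cell::RefCell;

#[derive(Debug)]
struct ScopeData {
    name: String,
    children: Vec<ScopeData>,
}

thread_local!(static STACK: RefCell<Vec<ScopeData>> = RefCell::new(Vec::new()));

struct Scope;

impl Scope {
    fn new(name: &str) -> Scope {
        // Entering a scope: push a fresh node onto the thread-local stack.
        STACK.with(|s| {
            s.borrow_mut().push(ScopeData {
                name: name.to_owned(),
                children: Vec::new(),
            })
        });
        Scope
    }
}

impl Drop for Scope {
    fn drop(&mut self) {
        // Leaving a scope: pop the node and attach it to its parent, or dump
        // it if it was the root (the real code serializes the root to JSON).
        STACK.with(|s| {
            let mut stack = s.borrow_mut();
            let finished = stack.pop().unwrap();
            match stack.last_mut() {
                Some(parent) => parent.children.push(finished),
                None => println!("{:?}", finished),
            }
        });
    }
}

fn main() {
    let _root = Scope::new("root");
    {
        let _child = Scope::new("child");
    } // `_child` is folded into `_root` here
} // `_root` is printed here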
issue-13853-5.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Deserializer<'a> { }
trait Deserializable {
fn deserialize_token<'a, D: Deserializer<'a>>(_: D, _: &'a str) -> Self;
}
impl<'a, T: Deserializable> Deserializable for &'a str {
//~^ ERROR type parameter `T` is not constrained
fn deserialize_token<D: Deserializer<'a>>(_x: D, _y: &'a str) -> &'a str {
}
}
fn main() {}
|
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
random_line_split
|
issue-13853-5.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Deserializer<'a> { }
trait Deserializable {
fn deserialize_token<'a, D: Deserializer<'a>>(_: D, _: &'a str) -> Self;
}
impl<'a, T: Deserializable> Deserializable for &'a str {
//~^ ERROR type parameter `T` is not constrained
fn deserialize_token<D: Deserializer<'a>>(_x: D, _y: &'a str) -> &'a str
|
}
fn main() {}
|
{
}
|
identifier_body
|
issue-13853-5.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Deserializer<'a> { }
trait Deserializable {
fn deserialize_token<'a, D: Deserializer<'a>>(_: D, _: &'a str) -> Self;
}
impl<'a, T: Deserializable> Deserializable for &'a str {
//~^ ERROR type parameter `T` is not constrained
fn deserialize_token<D: Deserializer<'a>>(_x: D, _y: &'a str) -> &'a str {
}
}
fn
|
() {}
|
main
|
identifier_name
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![cfg_attr(feature = "unstable", feature(core_intrinsics))]
// Have this here rather than in non_android_main.rs to work around
// https://github.com/rust-lang/rust/issues/53205
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate log;
#[cfg(not(target_os = "android"))]
include!("non_android_main.rs");
#[cfg(target_os = "android")]
pub fn
|
() {
println!(
"Cannot start /ports/servo/ on Android. \
Use /support/android/apk/ + /ports/libsimpleservo/ instead"
);
}
|
main
|
identifier_name
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![cfg_attr(feature = "unstable", feature(core_intrinsics))]
|
extern crate log;
#[cfg(not(target_os = "android"))]
include!("non_android_main.rs");
#[cfg(target_os = "android")]
pub fn main() {
println!(
"Cannot start /ports/servo/ on Android. \
Use /support/android/apk/ + /ports/libsimpleservo/ instead"
);
}
|
// Have this here rather than in non_android_main.rs to work around
// https://github.com/rust-lang/rust/issues/53205
#[cfg(not(target_os = "android"))]
#[macro_use]
|
random_line_split
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![cfg_attr(feature = "unstable", feature(core_intrinsics))]
// Have this here rather than in non_android_main.rs to work around
// https://github.com/rust-lang/rust/issues/53205
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate log;
#[cfg(not(target_os = "android"))]
include!("non_android_main.rs");
#[cfg(target_os = "android")]
pub fn main()
|
{
println!(
"Cannot start /ports/servo/ on Android. \
Use /support/android/apk/ + /ports/libsimpleservo/ instead"
);
}
|
identifier_body
|
|
function-call.rs
|
// This test does not pass with gdb < 8.0. See #53497.
// min-gdb-version: 10.1
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print fun(45, true)
// gdb-check:$1 = true
// gdb-command:print fun(444, false)
// gdb-check:$2 = false
// gdb-command:print r.get_x()
// gdb-check:$3 = 4
#![allow(dead_code, unused_variables)]
struct RegularStruct {
x: i32
}
impl RegularStruct {
fn get_x(&self) -> i32 {
self.x
}
}
|
let _ = r.get_x();
zzz(); // #break
}
fn fun(x: isize, y: bool) -> bool {
y
}
fn zzz() { () }
|
fn main() {
let _ = fun(4, true);
let r = RegularStruct{x: 4};
|
random_line_split
|
function-call.rs
|
// This test does not pass with gdb < 8.0. See #53497.
// min-gdb-version: 10.1
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print fun(45, true)
// gdb-check:$1 = true
// gdb-command:print fun(444, false)
// gdb-check:$2 = false
// gdb-command:print r.get_x()
// gdb-check:$3 = 4
#![allow(dead_code, unused_variables)]
struct RegularStruct {
x: i32
}
impl RegularStruct {
fn get_x(&self) -> i32 {
self.x
}
}
fn
|
() {
let _ = fun(4, true);
let r = RegularStruct{x: 4};
let _ = r.get_x();
zzz(); // #break
}
fn fun(x: isize, y: bool) -> bool {
y
}
fn zzz() { () }
|
main
|
identifier_name
|
util.rs
|
#![allow(clippy::declare_interior_mutable_const)]
use std::{
cell::{RefCell, RefMut},
cmp,
convert::Infallible,
io,
};
use xitca_web::{
dev::bytes::{Bytes, BytesMut},
http::{
header::{HeaderValue, SERVER},
StatusCode,
},
response::{WebResponse, WebResponseBuilder},
};
pub(super) type HandleResult = Result<WebResponse, Infallible>;
pub(super) struct Writer<'a>(RefMut<'a, BytesMut>);
impl Writer<'_> {
#[inline]
pub fn take(mut self) -> Bytes {
self.0.split().freeze()
}
}
impl io::Write for &mut Writer<'_> {
#[inline]
fn
|
(&mut self, buf: &[u8]) -> io::Result<usize> {
self.0.extend_from_slice(buf);
Ok(buf.len())
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
pub(super) trait QueryParse {
fn parse_query(self) -> u16;
}
impl QueryParse for Option<&str> {
fn parse_query(self) -> u16 {
let num = self
.and_then(|this| {
use atoi::FromRadix10;
this.find('q')
.map(|pos| u16::from_radix_10(this.split_at(pos + 2).1.as_ref()).0)
})
.unwrap_or(1);
cmp::min(500, cmp::max(1, num))
}
}
pub(super) struct AppState<C> {
client: C,
    // a re-usable buffer for writing response data.
write_buf: RefCell<BytesMut>,
}
impl<C> AppState<C> {
pub(super) fn new(client: C) -> Self {
let write_buf = RefCell::new(BytesMut::new());
Self { client, write_buf }
}
#[inline]
pub(super) fn writer(&self) -> Writer<'_> {
Writer(self.write_buf.borrow_mut())
}
#[inline]
pub(super) fn client(&self) -> &C {
&self.client
}
}
pub const SERVER_HEADER_VALUE: HeaderValue = HeaderValue::from_static("TFB");
pub const HTML_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/html; charset=utf-8");
pub const TEXT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/plain");
pub const JSON_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/json");
macro_rules! error {
($error: ident, $code: path) => {
#[cold]
#[inline(never)]
pub(super) fn $error() -> HandleResult {
Ok(WebResponseBuilder::new()
.status($code)
.header(SERVER, SERVER_HEADER_VALUE)
.body(Bytes::new().into())
.unwrap())
}
};
}
error!(not_found, StatusCode::NOT_FOUND);
error!(internal, StatusCode::INTERNAL_SERVER_ERROR);
|
write
|
identifier_name
|
util.rs
|
#![allow(clippy::declare_interior_mutable_const)]
use std::{
cell::{RefCell, RefMut},
cmp,
convert::Infallible,
io,
};
use xitca_web::{
dev::bytes::{Bytes, BytesMut},
http::{
header::{HeaderValue, SERVER},
StatusCode,
},
response::{WebResponse, WebResponseBuilder},
};
pub(super) type HandleResult = Result<WebResponse, Infallible>;
pub(super) struct Writer<'a>(RefMut<'a, BytesMut>);
impl Writer<'_> {
#[inline]
pub fn take(mut self) -> Bytes {
self.0.split().freeze()
}
}
impl io::Write for &mut Writer<'_> {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.0.extend_from_slice(buf);
Ok(buf.len())
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
pub(super) trait QueryParse {
fn parse_query(self) -> u16;
}
impl QueryParse for Option<&str> {
fn parse_query(self) -> u16 {
let num = self
.and_then(|this| {
use atoi::FromRadix10;
this.find('q')
.map(|pos| u16::from_radix_10(this.split_at(pos + 2).1.as_ref()).0)
})
.unwrap_or(1);
|
pub(super) struct AppState<C> {
client: C,
    // a re-usable buffer for writing response data.
write_buf: RefCell<BytesMut>,
}
impl<C> AppState<C> {
pub(super) fn new(client: C) -> Self {
let write_buf = RefCell::new(BytesMut::new());
Self { client, write_buf }
}
#[inline]
pub(super) fn writer(&self) -> Writer<'_> {
Writer(self.write_buf.borrow_mut())
}
#[inline]
pub(super) fn client(&self) -> &C {
&self.client
}
}
pub const SERVER_HEADER_VALUE: HeaderValue = HeaderValue::from_static("TFB");
pub const HTML_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/html; charset=utf-8");
pub const TEXT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/plain");
pub const JSON_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/json");
macro_rules! error {
($error: ident, $code: path) => {
#[cold]
#[inline(never)]
pub(super) fn $error() -> HandleResult {
Ok(WebResponseBuilder::new()
.status($code)
.header(SERVER, SERVER_HEADER_VALUE)
.body(Bytes::new().into())
.unwrap())
}
};
}
error!(not_found, StatusCode::NOT_FOUND);
error!(internal, StatusCode::INTERNAL_SERVER_ERROR);
|
cmp::min(500, cmp::max(1, num))
}
}
|
random_line_split
|
util.rs
|
#![allow(clippy::declare_interior_mutable_const)]
use std::{
cell::{RefCell, RefMut},
cmp,
convert::Infallible,
io,
};
use xitca_web::{
dev::bytes::{Bytes, BytesMut},
http::{
header::{HeaderValue, SERVER},
StatusCode,
},
response::{WebResponse, WebResponseBuilder},
};
pub(super) type HandleResult = Result<WebResponse, Infallible>;
pub(super) struct Writer<'a>(RefMut<'a, BytesMut>);
impl Writer<'_> {
#[inline]
pub fn take(mut self) -> Bytes {
self.0.split().freeze()
}
}
impl io::Write for &mut Writer<'_> {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.0.extend_from_slice(buf);
Ok(buf.len())
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
pub(super) trait QueryParse {
fn parse_query(self) -> u16;
}
impl QueryParse for Option<&str> {
fn parse_query(self) -> u16 {
let num = self
.and_then(|this| {
use atoi::FromRadix10;
this.find('q')
.map(|pos| u16::from_radix_10(this.split_at(pos + 2).1.as_ref()).0)
})
.unwrap_or(1);
cmp::min(500, cmp::max(1, num))
}
}
pub(super) struct AppState<C> {
client: C,
    // a re-usable buffer for writing response data.
write_buf: RefCell<BytesMut>,
}
impl<C> AppState<C> {
pub(super) fn new(client: C) -> Self
|
#[inline]
pub(super) fn writer(&self) -> Writer<'_> {
Writer(self.write_buf.borrow_mut())
}
#[inline]
pub(super) fn client(&self) -> &C {
&self.client
}
}
pub const SERVER_HEADER_VALUE: HeaderValue = HeaderValue::from_static("TFB");
pub const HTML_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/html; charset=utf-8");
pub const TEXT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/plain");
pub const JSON_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/json");
macro_rules! error {
($error: ident, $code: path) => {
#[cold]
#[inline(never)]
pub(super) fn $error() -> HandleResult {
Ok(WebResponseBuilder::new()
.status($code)
.header(SERVER, SERVER_HEADER_VALUE)
.body(Bytes::new().into())
.unwrap())
}
};
}
error!(not_found, StatusCode::NOT_FOUND);
error!(internal, StatusCode::INTERNAL_SERVER_ERROR);
|
{
let write_buf = RefCell::new(BytesMut::new());
Self { client, write_buf }
}
|
identifier_body
|
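`QueryParse::parse_query` above extracts the `q=` count from the raw query string with the `atoi` crate and clamps it to 1..=500. A rough dependency-free sketch of the same behaviour, with the digit parsing done by hand (so it only approximates `u16::from_radix_10`):

// Locate `q`, skip the `=`, read the leading digits, then clamp to 1..=500.
fn parse_query(query: Option<&str>) -> u16 {
    let num = query
        .and_then(|q| q.find('q').and_then(|pos| q.get(pos + 2..)))
        .map(|rest| {
            rest.chars()
                .take_while(|c| c.is_ascii_digit())
                .fold(0u32, |acc, d| {
                    acc.saturating_mul(10).saturating_add(d.to_digit(10).unwrap())
                })
        })
        .unwrap_or(1);
    num.clamp(1, 500) as u16
}

fn main() {
    assert_eq!(parse_query(Some("q=20")), 20);
    assert_eq!(parse_query(Some("q=9999")), 500); // clamped to the upper bound
    assert_eq!(parse_query(Some("q=0")), 1);      // clamped to the lower bound
    assert_eq!(parse_query(None), 1);             // missing parameter defaults to 1
}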
map.rs
|
#[cfg(feature = "vec_map")]
pub use vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
pub use self::vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
mod vec_map {
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::fmt::{self, Debug, Formatter};
#[derive(Clone, Default, Debug)]
pub struct VecMap<V> {
inner: BTreeMap<usize, V>,
}
impl<V> VecMap<V> {
pub fn new() -> Self {
VecMap {
inner: Default::default(),
}
}
pub fn len(&self) -> usize {
self.inner.len()
}
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
pub fn insert(&mut self, key: usize, value: V) -> Option<V> {
self.inner.insert(key, value)
}
pub fn values(&self) -> Values<V> {
self.inner.values()
}
pub fn iter(&self) -> Iter<V>
|
pub fn contains_key(&self, key: usize) -> bool {
self.inner.contains_key(&key)
}
pub fn entry(&mut self, key: usize) -> Entry<V> {
self.inner.entry(key)
}
pub fn get(&self, key: usize) -> Option<&V> {
self.inner.get(&key)
}
}
pub type Values<'a, V> = btree_map::Values<'a, usize, V>;
pub type Entry<'a, V> = btree_map::Entry<'a, usize, V>;
#[derive(Clone)]
pub struct Iter<'a, V: 'a> {
inner: btree_map::Iter<'a, usize, V>,
}
impl<'a, V: 'a + Debug> Debug for Iter<'a, V> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.clone()).finish()
}
}
impl<'a, V: 'a> Iterator for Iter<'a, V> {
type Item = (usize, &'a V);
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|(k, v)| (*k, v))
}
}
impl<'a, V: 'a> DoubleEndedIterator for Iter<'a, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.inner.next_back().map(|(k, v)| (*k, v))
}
}
}
|
{
Iter {
inner: self.inner.iter(),
}
}
|
identifier_body
|
map.rs
|
#[cfg(feature = "vec_map")]
pub use vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
pub use self::vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
mod vec_map {
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::fmt::{self, Debug, Formatter};
#[derive(Clone, Default, Debug)]
pub struct VecMap<V> {
inner: BTreeMap<usize, V>,
}
impl<V> VecMap<V> {
pub fn new() -> Self {
VecMap {
inner: Default::default(),
}
}
pub fn len(&self) -> usize {
self.inner.len()
}
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
pub fn insert(&mut self, key: usize, value: V) -> Option<V> {
self.inner.insert(key, value)
}
pub fn
|
(&self) -> Values<V> {
self.inner.values()
}
pub fn iter(&self) -> Iter<V> {
Iter {
inner: self.inner.iter(),
}
}
pub fn contains_key(&self, key: usize) -> bool {
self.inner.contains_key(&key)
}
pub fn entry(&mut self, key: usize) -> Entry<V> {
self.inner.entry(key)
}
pub fn get(&self, key: usize) -> Option<&V> {
self.inner.get(&key)
}
}
pub type Values<'a, V> = btree_map::Values<'a, usize, V>;
pub type Entry<'a, V> = btree_map::Entry<'a, usize, V>;
#[derive(Clone)]
pub struct Iter<'a, V: 'a> {
inner: btree_map::Iter<'a, usize, V>,
}
impl<'a, V: 'a + Debug> Debug for Iter<'a, V> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.clone()).finish()
}
}
impl<'a, V: 'a> Iterator for Iter<'a, V> {
type Item = (usize, &'a V);
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|(k, v)| (*k, v))
}
}
impl<'a, V: 'a> DoubleEndedIterator for Iter<'a, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.inner.next_back().map(|(k, v)| (*k, v))
}
}
}
|
values
|
identifier_name
|
map.rs
|
#[cfg(feature = "vec_map")]
pub use vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
pub use self::vec_map::{Values, VecMap};
#[cfg(not(feature = "vec_map"))]
mod vec_map {
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::fmt::{self, Debug, Formatter};
#[derive(Clone, Default, Debug)]
pub struct VecMap<V> {
inner: BTreeMap<usize, V>,
}
impl<V> VecMap<V> {
pub fn new() -> Self {
VecMap {
inner: Default::default(),
}
}
pub fn len(&self) -> usize {
|
self.inner.is_empty()
}
pub fn insert(&mut self, key: usize, value: V) -> Option<V> {
self.inner.insert(key, value)
}
pub fn values(&self) -> Values<V> {
self.inner.values()
}
pub fn iter(&self) -> Iter<V> {
Iter {
inner: self.inner.iter(),
}
}
pub fn contains_key(&self, key: usize) -> bool {
self.inner.contains_key(&key)
}
pub fn entry(&mut self, key: usize) -> Entry<V> {
self.inner.entry(key)
}
pub fn get(&self, key: usize) -> Option<&V> {
self.inner.get(&key)
}
}
pub type Values<'a, V> = btree_map::Values<'a, usize, V>;
pub type Entry<'a, V> = btree_map::Entry<'a, usize, V>;
#[derive(Clone)]
pub struct Iter<'a, V: 'a> {
inner: btree_map::Iter<'a, usize, V>,
}
impl<'a, V: 'a + Debug> Debug for Iter<'a, V> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_list().entries(self.inner.clone()).finish()
}
}
impl<'a, V: 'a> Iterator for Iter<'a, V> {
type Item = (usize, &'a V);
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|(k, v)| (*k, v))
}
}
impl<'a, V: 'a> DoubleEndedIterator for Iter<'a, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.inner.next_back().map(|(k, v)| (*k, v))
}
}
}
|
self.inner.len()
}
pub fn is_empty(&self) -> bool {
|
random_line_split
|
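The fallback `VecMap` above is a thin wrapper over `BTreeMap<usize, V>` exposing a small slice of the `vec_map` crate's API. A short usage sketch against exactly the methods shown, assuming the fallback module above is the one in scope:

// Exercises the wrapper methods defined above; keys come back in ascending
// order because the fallback is backed by a BTreeMap.
fn demo() {
    let mut map: VecMap<&str> = VecMap::new();
    assert!(map.is_empty());

    map.insert(2, "two");
    map.insert(0, "zero");
    assert_eq!(map.len(), 2);
    assert!(map.contains_key(0));
    assert_eq!(map.get(2), Some(&"two"));

    let keys: Vec<usize> = map.iter().map(|(k, _)| k).collect();
    assert_eq!(keys, vec![0, 2]);
}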
pairs.rs
|
use rustc_ast::ast;
use crate::config::lists::*;
use crate::config::IndentStyle;
use crate::rewrite::{Rewrite, RewriteContext};
use crate::shape::Shape;
use crate::utils::{
first_line_width, is_single_line, last_line_width, trimmed_last_line_width, wrap_str,
};
/// Sigils that decorate a binop pair.
#[derive(new, Clone, Copy)]
pub(crate) struct PairParts<'a> {
prefix: &'a str,
infix: &'a str,
suffix: &'a str,
}
impl<'a> PairParts<'a> {
pub(crate) fn infix(infix: &'a str) -> PairParts<'a> {
PairParts {
prefix: "",
infix,
suffix: "",
}
}
}
// Flattens a tree of pairs into a list and tries to rewrite them all at once.
// FIXME would be nice to reuse the lists API for this, but because each separator
// can be different, we can't.
pub(crate) fn
|
(
expr: &ast::Expr,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
expr.flatten(context, shape).and_then(|list| {
// First we try formatting on one line.
rewrite_pairs_one_line(&list, shape, context)
.or_else(|| rewrite_pairs_multiline(&list, shape, context))
})
}
// This may return a multi-line result since we allow the last expression to go
// multiline in a 'single line' formatting.
fn rewrite_pairs_one_line<T: Rewrite>(
list: &PairList<'_, '_, T>,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
assert!(list.list.len() >= 2, "Not a pair?");
let mut result = String::new();
let base_shape = shape.block();
for ((_, rewrite), s) in list.list.iter().zip(list.separators.iter()) {
if let Some(rewrite) = rewrite {
            if !is_single_line(&rewrite) || result.len() > shape.width {
return None;
}
result.push_str(&rewrite);
result.push(' ');
result.push_str(s);
result.push(' ');
} else {
return None;
}
}
let prefix_len = result.len();
let last = list.list.last()?.0;
let cur_shape = base_shape.offset_left(last_line_width(&result))?;
let last_rewrite = last.rewrite(context, cur_shape)?;
result.push_str(&last_rewrite);
if first_line_width(&result) > shape.width {
return None;
}
// Check the last expression in the list. We sometimes let this expression
// go over multiple lines, but we check for some ugly conditions.
    if !(is_single_line(&result) || last_rewrite.starts_with('{'))
&& (last_rewrite.starts_with('(') || prefix_len > context.config.tab_spaces())
{
return None;
}
wrap_str(result, context.config.max_width(), shape)
}
fn rewrite_pairs_multiline<T: Rewrite>(
list: &PairList<'_, '_, T>,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
let rhs_offset = shape.rhs_overhead(&context.config);
let nested_shape = (match context.config.indent_style() {
IndentStyle::Visual => shape.visual_indent(0),
IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
})
.with_max_width(&context.config)
.sub_width(rhs_offset)?;
let indent_str = nested_shape.indent.to_string_with_newline(context.config);
let mut result = String::new();
result.push_str(&list.list[0].1.as_ref()?);
for ((e, default_rw), s) in list.list[1..].iter().zip(list.separators.iter()) {
// The following test checks if we should keep two subexprs on the same
// line. We do this if not doing so would create an orphan and there is
// enough space to do so.
let offset = if result.contains('\n') {
0
} else {
shape.used_width()
};
if last_line_width(&result) + offset <= nested_shape.used_width() {
// We must snuggle the next line onto the previous line to avoid an orphan.
if let Some(line_shape) =
shape.offset_left(s.len() + 2 + trimmed_last_line_width(&result))
{
if let Some(rewrite) = e.rewrite(context, line_shape) {
result.push(' ');
result.push_str(s);
result.push(' ');
result.push_str(&rewrite);
continue;
}
}
}
match context.config.binop_separator() {
SeparatorPlace::Back => {
result.push(' ');
result.push_str(s);
result.push_str(&indent_str);
}
SeparatorPlace::Front => {
result.push_str(&indent_str);
result.push_str(s);
result.push(' ');
}
}
result.push_str(&default_rw.as_ref()?);
}
Some(result)
}
// Rewrites a single pair.
pub(crate) fn rewrite_pair<LHS, RHS>(
lhs: &LHS,
rhs: &RHS,
pp: PairParts<'_>,
context: &RewriteContext<'_>,
shape: Shape,
separator_place: SeparatorPlace,
) -> Option<String>
where
LHS: Rewrite,
RHS: Rewrite,
{
let tab_spaces = context.config.tab_spaces();
let lhs_overhead = match separator_place {
SeparatorPlace::Back => shape.used_width() + pp.prefix.len() + pp.infix.trim_end().len(),
SeparatorPlace::Front => shape.used_width(),
};
let lhs_shape = Shape {
width: context.budget(lhs_overhead),
..shape
};
let lhs_result = lhs
.rewrite(context, lhs_shape)
.map(|lhs_str| format!("{}{}", pp.prefix, lhs_str))?;
// Try to put both lhs and rhs on the same line.
let rhs_orig_result = shape
.offset_left(last_line_width(&lhs_result) + pp.infix.len())
.and_then(|s| s.sub_width(pp.suffix.len()))
.and_then(|rhs_shape| rhs.rewrite(context, rhs_shape));
if let Some(ref rhs_result) = rhs_orig_result {
// If the length of the lhs is equal to or shorter than the tab width or
// the rhs looks like block expression, we put the rhs on the same
// line with the lhs even if the rhs is multi-lined.
let allow_same_line = lhs_result.len() <= tab_spaces
|| rhs_result
.lines()
.next()
.map(|first_line| first_line.ends_with('{'))
.unwrap_or(false);
        if !rhs_result.contains('\n') || allow_same_line {
let one_line_width = last_line_width(&lhs_result)
+ pp.infix.len()
+ first_line_width(rhs_result)
+ pp.suffix.len();
if one_line_width <= shape.width {
return Some(format!(
"{}{}{}{}",
lhs_result, pp.infix, rhs_result, pp.suffix
));
}
}
}
// We have to use multiple lines.
// Re-evaluate the rhs because we have more space now:
let mut rhs_shape = match context.config.indent_style() {
IndentStyle::Visual => shape
.sub_width(pp.suffix.len() + pp.prefix.len())?
.visual_indent(pp.prefix.len()),
IndentStyle::Block => {
// Try to calculate the initial constraint on the right hand side.
let rhs_overhead = shape.rhs_overhead(context.config);
Shape::indented(shape.indent.block_indent(context.config), context.config)
.sub_width(rhs_overhead)?
}
};
let infix = match separator_place {
SeparatorPlace::Back => pp.infix.trim_end(),
SeparatorPlace::Front => pp.infix.trim_start(),
};
if separator_place == SeparatorPlace::Front {
rhs_shape = rhs_shape.offset_left(infix.len())?;
}
let rhs_result = rhs.rewrite(context, rhs_shape)?;
let indent_str = rhs_shape.indent.to_string_with_newline(context.config);
let infix_with_sep = match separator_place {
SeparatorPlace::Back => format!("{}{}", infix, indent_str),
SeparatorPlace::Front => format!("{}{}", indent_str, infix),
};
Some(format!(
"{}{}{}{}",
lhs_result, infix_with_sep, rhs_result, pp.suffix
))
}
// A pair which forms a tree and can be flattened (e.g., binops).
trait FlattenPair: Rewrite + Sized {
fn flatten(&self, _: &RewriteContext<'_>, _: Shape) -> Option<PairList<'_, '_, Self>> {
None
}
}
struct PairList<'a, 'b, T: Rewrite> {
list: Vec<(&'b T, Option<String>)>,
separators: Vec<&'a str>,
}
impl FlattenPair for ast::Expr {
fn flatten(
&self,
context: &RewriteContext<'_>,
shape: Shape,
) -> Option<PairList<'_, '_, ast::Expr>> {
let top_op = match self.kind {
ast::ExprKind::Binary(op, _, _) => op.node,
_ => return None,
};
let default_rewrite = |node: &ast::Expr, sep: usize, is_first: bool| {
if is_first {
return node.rewrite(context, shape);
}
let nested_overhead = sep + 1;
let rhs_offset = shape.rhs_overhead(&context.config);
let nested_shape = (match context.config.indent_style() {
IndentStyle::Visual => shape.visual_indent(0),
IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
})
.with_max_width(&context.config)
.sub_width(rhs_offset)?;
let default_shape = match context.config.binop_separator() {
SeparatorPlace::Back => nested_shape.sub_width(nested_overhead)?,
SeparatorPlace::Front => nested_shape.offset_left(nested_overhead)?,
};
node.rewrite(context, default_shape)
};
// Turn a tree of binop expressions into a list using a depth-first,
// in-order traversal.
let mut stack = vec![];
let mut list = vec![];
let mut separators = vec![];
let mut node = self;
loop {
match node.kind {
ast::ExprKind::Binary(op, ref lhs, _) if op.node == top_op => {
stack.push(node);
node = lhs;
}
_ => {
let op_len = separators.last().map_or(0, |s: &&str| s.len());
let rw = default_rewrite(node, op_len, list.is_empty());
list.push((node, rw));
if let Some(pop) = stack.pop() {
match pop.kind {
ast::ExprKind::Binary(op, _, ref rhs) => {
separators.push(op.node.to_string());
node = rhs;
}
_ => unreachable!(),
}
} else {
break;
}
}
}
}
assert_eq!(list.len() - 1, separators.len());
Some(PairList { list, separators })
}
}
impl FlattenPair for ast::Ty {}
impl FlattenPair for ast::Pat {}
|
rewrite_all_pairs
|
identifier_name
|
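The `flatten` impl above walks a binop tree depth-first and in order, producing a flat operand list plus the separators between them so the whole chain can be rewritten in one pass. A stripped-down self-contained sketch of that traversal over a toy expression type (not rustfmt's AST):

// Toy expression tree: a chain like (1 + 2) + 3 is left-leaning, so an
// in-order walk yields the operands and separators in source order.
enum Expr {
    Lit(i64),
    Binary(&'static str, Box<Expr>, Box<Expr>),
}

fn flatten(expr: &Expr) -> (Vec<i64>, Vec<&'static str>) {
    let mut operands = Vec::new();
    let mut separators = Vec::new();
    let mut stack = Vec::new();
    let mut node = expr;
    loop {
        match node {
            // Descend into the left subtree, remembering the parent.
            Expr::Binary(_, lhs, _) => {
                stack.push(node);
                node = lhs.as_ref();
            }
            // Leaf reached: record it, then resume with the parent's operator
            // and right subtree, mirroring the stack loop in `flatten` above.
            Expr::Lit(n) => {
                operands.push(*n);
                match stack.pop() {
                    Some(Expr::Binary(op, _, rhs)) => {
                        separators.push(*op);
                        node = rhs.as_ref();
                    }
                    _ => break,
                }
            }
        }
    }
    (operands, separators)
}

fn main() {
    // (1 + 2) + 3
    let e = Expr::Binary(
        "+",
        Box::new(Expr::Binary("+", Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2)))),
        Box::new(Expr::Lit(3)),
    );
    assert_eq!(flatten(&e), (vec![1, 2, 3], vec!["+", "+"]));
}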
pairs.rs
|
use rustc_ast::ast;
use crate::config::lists::*;
use crate::config::IndentStyle;
use crate::rewrite::{Rewrite, RewriteContext};
use crate::shape::Shape;
use crate::utils::{
first_line_width, is_single_line, last_line_width, trimmed_last_line_width, wrap_str,
};
/// Sigils that decorate a binop pair.
|
infix: &'a str,
suffix: &'a str,
}
impl<'a> PairParts<'a> {
pub(crate) fn infix(infix: &'a str) -> PairParts<'a> {
PairParts {
prefix: "",
infix,
suffix: "",
}
}
}
// Flattens a tree of pairs into a list and tries to rewrite them all at once.
// FIXME would be nice to reuse the lists API for this, but because each separator
// can be different, we can't.
pub(crate) fn rewrite_all_pairs(
expr: &ast::Expr,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
expr.flatten(context, shape).and_then(|list| {
// First we try formatting on one line.
rewrite_pairs_one_line(&list, shape, context)
.or_else(|| rewrite_pairs_multiline(&list, shape, context))
})
}
// This may return a multi-line result since we allow the last expression to go
// multiline in a 'single line' formatting.
fn rewrite_pairs_one_line<T: Rewrite>(
list: &PairList<'_, '_, T>,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
assert!(list.list.len() >= 2, "Not a pair?");
let mut result = String::new();
let base_shape = shape.block();
for ((_, rewrite), s) in list.list.iter().zip(list.separators.iter()) {
if let Some(rewrite) = rewrite {
            if !is_single_line(&rewrite) || result.len() > shape.width {
return None;
}
result.push_str(&rewrite);
result.push(' ');
result.push_str(s);
result.push(' ');
} else {
return None;
}
}
let prefix_len = result.len();
let last = list.list.last()?.0;
let cur_shape = base_shape.offset_left(last_line_width(&result))?;
let last_rewrite = last.rewrite(context, cur_shape)?;
result.push_str(&last_rewrite);
if first_line_width(&result) > shape.width {
return None;
}
// Check the last expression in the list. We sometimes let this expression
// go over multiple lines, but we check for some ugly conditions.
    if !(is_single_line(&result) || last_rewrite.starts_with('{'))
&& (last_rewrite.starts_with('(') || prefix_len > context.config.tab_spaces())
{
return None;
}
wrap_str(result, context.config.max_width(), shape)
}
fn rewrite_pairs_multiline<T: Rewrite>(
list: &PairList<'_, '_, T>,
shape: Shape,
context: &RewriteContext<'_>,
) -> Option<String> {
let rhs_offset = shape.rhs_overhead(&context.config);
let nested_shape = (match context.config.indent_style() {
IndentStyle::Visual => shape.visual_indent(0),
IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
})
.with_max_width(&context.config)
.sub_width(rhs_offset)?;
let indent_str = nested_shape.indent.to_string_with_newline(context.config);
let mut result = String::new();
result.push_str(&list.list[0].1.as_ref()?);
for ((e, default_rw), s) in list.list[1..].iter().zip(list.separators.iter()) {
// The following test checks if we should keep two subexprs on the same
// line. We do this if not doing so would create an orphan and there is
// enough space to do so.
let offset = if result.contains('\n') {
0
} else {
shape.used_width()
};
if last_line_width(&result) + offset <= nested_shape.used_width() {
// We must snuggle the next line onto the previous line to avoid an orphan.
if let Some(line_shape) =
shape.offset_left(s.len() + 2 + trimmed_last_line_width(&result))
{
if let Some(rewrite) = e.rewrite(context, line_shape) {
result.push(' ');
result.push_str(s);
result.push(' ');
result.push_str(&rewrite);
continue;
}
}
}
match context.config.binop_separator() {
SeparatorPlace::Back => {
result.push(' ');
result.push_str(s);
result.push_str(&indent_str);
}
SeparatorPlace::Front => {
result.push_str(&indent_str);
result.push_str(s);
result.push(' ');
}
}
result.push_str(&default_rw.as_ref()?);
}
Some(result)
}
// Rewrites a single pair.
pub(crate) fn rewrite_pair<LHS, RHS>(
lhs: &LHS,
rhs: &RHS,
pp: PairParts<'_>,
context: &RewriteContext<'_>,
shape: Shape,
separator_place: SeparatorPlace,
) -> Option<String>
where
LHS: Rewrite,
RHS: Rewrite,
{
let tab_spaces = context.config.tab_spaces();
let lhs_overhead = match separator_place {
SeparatorPlace::Back => shape.used_width() + pp.prefix.len() + pp.infix.trim_end().len(),
SeparatorPlace::Front => shape.used_width(),
};
let lhs_shape = Shape {
width: context.budget(lhs_overhead),
..shape
};
let lhs_result = lhs
.rewrite(context, lhs_shape)
.map(|lhs_str| format!("{}{}", pp.prefix, lhs_str))?;
// Try to put both lhs and rhs on the same line.
let rhs_orig_result = shape
.offset_left(last_line_width(&lhs_result) + pp.infix.len())
.and_then(|s| s.sub_width(pp.suffix.len()))
.and_then(|rhs_shape| rhs.rewrite(context, rhs_shape));
if let Some(ref rhs_result) = rhs_orig_result {
// If the length of the lhs is equal to or shorter than the tab width or
// the rhs looks like block expression, we put the rhs on the same
// line with the lhs even if the rhs is multi-lined.
let allow_same_line = lhs_result.len() <= tab_spaces
|| rhs_result
.lines()
.next()
.map(|first_line| first_line.ends_with('{'))
.unwrap_or(false);
if !rhs_result.contains('\n') || allow_same_line {
let one_line_width = last_line_width(&lhs_result)
+ pp.infix.len()
+ first_line_width(rhs_result)
+ pp.suffix.len();
if one_line_width <= shape.width {
return Some(format!(
"{}{}{}{}",
lhs_result, pp.infix, rhs_result, pp.suffix
));
}
}
}
// We have to use multiple lines.
// Re-evaluate the rhs because we have more space now:
let mut rhs_shape = match context.config.indent_style() {
IndentStyle::Visual => shape
.sub_width(pp.suffix.len() + pp.prefix.len())?
.visual_indent(pp.prefix.len()),
IndentStyle::Block => {
// Try to calculate the initial constraint on the right hand side.
let rhs_overhead = shape.rhs_overhead(context.config);
Shape::indented(shape.indent.block_indent(context.config), context.config)
.sub_width(rhs_overhead)?
}
};
let infix = match separator_place {
SeparatorPlace::Back => pp.infix.trim_end(),
SeparatorPlace::Front => pp.infix.trim_start(),
};
if separator_place == SeparatorPlace::Front {
rhs_shape = rhs_shape.offset_left(infix.len())?;
}
let rhs_result = rhs.rewrite(context, rhs_shape)?;
let indent_str = rhs_shape.indent.to_string_with_newline(context.config);
let infix_with_sep = match separator_place {
SeparatorPlace::Back => format!("{}{}", infix, indent_str),
SeparatorPlace::Front => format!("{}{}", indent_str, infix),
};
Some(format!(
"{}{}{}{}",
lhs_result, infix_with_sep, rhs_result, pp.suffix
))
}
// A pair which forms a tree and can be flattened (e.g., binops).
trait FlattenPair: Rewrite + Sized {
fn flatten(&self, _: &RewriteContext<'_>, _: Shape) -> Option<PairList<'_, '_, Self>> {
None
}
}
struct PairList<'a, 'b, T: Rewrite> {
list: Vec<(&'b T, Option<String>)>,
separators: Vec<&'a str>,
}
impl FlattenPair for ast::Expr {
fn flatten(
&self,
context: &RewriteContext<'_>,
shape: Shape,
) -> Option<PairList<'_, '_, ast::Expr>> {
let top_op = match self.kind {
ast::ExprKind::Binary(op, _, _) => op.node,
_ => return None,
};
let default_rewrite = |node: &ast::Expr, sep: usize, is_first: bool| {
if is_first {
return node.rewrite(context, shape);
}
let nested_overhead = sep + 1;
let rhs_offset = shape.rhs_overhead(&context.config);
let nested_shape = (match context.config.indent_style() {
IndentStyle::Visual => shape.visual_indent(0),
IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
})
.with_max_width(&context.config)
.sub_width(rhs_offset)?;
let default_shape = match context.config.binop_separator() {
SeparatorPlace::Back => nested_shape.sub_width(nested_overhead)?,
SeparatorPlace::Front => nested_shape.offset_left(nested_overhead)?,
};
node.rewrite(context, default_shape)
};
// Turn a tree of binop expressions into a list using a depth-first,
// in-order traversal.
let mut stack = vec![];
let mut list = vec![];
let mut separators = vec![];
let mut node = self;
loop {
match node.kind {
ast::ExprKind::Binary(op, ref lhs, _) if op.node == top_op => {
stack.push(node);
node = lhs;
}
_ => {
let op_len = separators.last().map_or(0, |s: &&str| s.len());
let rw = default_rewrite(node, op_len, list.is_empty());
list.push((node, rw));
if let Some(pop) = stack.pop() {
match pop.kind {
ast::ExprKind::Binary(op, _, ref rhs) => {
separators.push(op.node.to_string());
node = rhs;
}
_ => unreachable!(),
}
} else {
break;
}
}
}
}
assert_eq!(list.len() - 1, separators.len());
Some(PairList { list, separators })
}
}
impl FlattenPair for ast::Ty {}
impl FlattenPair for ast::Pat {}
|
#[derive(new, Clone, Copy)]
pub(crate) struct PairParts<'a> {
prefix: &'a str,
|
random_line_split
|
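A minimal, self-contained sketch of the flattening strategy shown in the pairs.rs record above: the same stack-based, in-order traversal of a left-leaning chain of same-operator binops, but over a toy Expr enum instead of rustc_ast (every name below is illustrative, not rustfmt's API).

// Toy stand-in for ast::Expr, for illustration only.
#[derive(Debug)]
enum Expr {
    Lit(i32),
    Binary(&'static str, Box<Expr>, Box<Expr>),
}

// Flatten a chain of binops that all use `top_op` into (operands, separators),
// mirroring the iterative in-order traversal in pairs.rs.
fn flatten<'a>(top: &'a Expr, top_op: &str) -> (Vec<&'a Expr>, Vec<&'static str>) {
    let mut stack = Vec::new();
    let mut list = Vec::new();
    let mut separators = Vec::new();
    let mut node = top;
    loop {
        match node {
            // Keep descending into the left-hand side while the operator matches.
            Expr::Binary(op, lhs, _) if *op == top_op => {
                stack.push(node);
                node = lhs;
            }
            // Leaf (or different operator): emit it, then resume with the
            // right-hand side of the most recently stacked parent.
            _ => {
                list.push(node);
                if let Some(Expr::Binary(op, _, rhs)) = stack.pop() {
                    separators.push(*op);
                    node = rhs;
                } else {
                    break;
                }
            }
        }
    }
    assert_eq!(list.len() - 1, separators.len());
    (list, separators)
}

fn main() {
    // (1 + 2) + 3, i.e. Binary("+", Binary("+", 1, 2), 3).
    let e = Expr::Binary(
        "+",
        Box::new(Expr::Binary("+", Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2)))),
        Box::new(Expr::Lit(3)),
    );
    let (operands, separators) = flatten(&e, "+");
    assert_eq!(operands.len(), 3);
    assert_eq!(separators, vec!["+", "+"]);
}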
traits.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use crate::{Word, Result};
use crate::private::layout::{CapTable, ListReader, StructReader, StructBuilder, StructSize,
PointerBuilder, PointerReader};
use std::marker::PhantomData;
pub trait FromStructReader<'a> {
fn new(reader: StructReader<'a>) -> Self;
}
pub trait HasStructSize {
fn struct_size() -> StructSize;
}
pub trait IntoInternalStructReader<'a> {
fn into_internal_struct_reader(self) -> StructReader<'a>;
}
pub trait FromStructBuilder<'a> {
fn new(struct_builder: StructBuilder<'a>) -> Self;
}
pub trait IntoInternalListReader<'a> {
fn into_internal_list_reader(self) -> ListReader<'a>;
}
pub trait FromPointerReader<'a> : Sized {
fn get_from_pointer(reader: &PointerReader<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
/// Associated types hackery that allows us to reason about Cap'n Proto types
/// without needing to give them a lifetime `'a`.
///
/// If `Foo` is a Cap'n Proto struct and `Bar` is a Rust-native struct, then
/// `foo::Reader<'a>` is to `foo::Owned` as `&'a Bar` is to `Bar`, and
/// `foo::Builder<'a>` is to `foo::Owned` as `&'a mut Bar` is to `Bar`.
/// The relationship is formalized by an `impl <'a> capnp::traits::Owned<'a> for foo::Owned`.
/// Because Cap'n Proto struct layout differs from Rust struct layout, a `foo::Owned` value
/// cannot be used for anything interesting on its own; the `foo::Owned` type is useful
/// nonetheless as a type parameter, e.g. for a generic container that owns a Cap'n Proto
/// message of type `T: for<'a> capnp::traits::Owned<'a>`.
pub trait Owned<'a> {
type Reader: FromPointerReader<'a> + SetPointerBuilder<Self::Builder>;
type Builder: FromPointerBuilder<'a>;
}
pub trait OwnedStruct<'a> {
type Reader: FromStructReader<'a> + SetPointerBuilder<Self::Builder> + IntoInternalStructReader<'a>;
type Builder: FromStructBuilder<'a> + HasStructSize;
}
pub trait Pipelined {
type Pipeline;
}
pub trait FromPointerBuilder<'a> : Sized {
fn init_pointer(builder: PointerBuilder<'a>, length: u32) -> Self;
fn get_from_pointer(builder: PointerBuilder<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
pub trait SetPointerBuilder<To> {
fn set_pointer_builder<'a>(builder: PointerBuilder<'a>, from: Self, canonicalize: bool) -> Result<()>;
}
pub trait Imbue<'a> {
fn imbue(&mut self, caps: &'a CapTable);
}
pub trait ImbueMut<'a> {
fn imbue_mut(&mut self, caps: &'a mut CapTable);
}
pub trait HasTypeId {
fn type_id() -> u64;
}
pub trait ToU16 {
fn to_u16(self) -> u16;
}
pub trait FromU16 : Sized {
fn from_u16(value: u16) -> ::std::result::Result<Self, crate::NotInSchema>;
}
pub trait IndexMove<I, T> {
fn index_move(&self, index: I) -> T;
}
pub struct ListIter<T, U> {
marker: PhantomData<U>,
list: T,
index: u32,
size: u32,
}
impl <T, U> ListIter<T, U>{
pub fn new(list: T, size: u32) -> ListIter<T, U> {
ListIter { list: list, index: 0, size: size, marker: PhantomData }
}
}
impl <U, T : IndexMove<u32, U>> ::std::iter::Iterator for ListIter<T, U> {
type Item = U;
fn next(&mut self) -> ::std::option::Option<U> {
if self.index < self.size
|
else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>){
(self.size as usize, Some(self.size as usize))
}
fn nth(&mut self, p: usize) -> Option<U>{
if p < self.size as usize {
self.index = p as u32;
let result = self.list.index_move(self.index);
Some(result)
} else {
None
}
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::ExactSizeIterator for ListIter<T, U>{
fn len(&self) -> usize{
self.size as usize
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::DoubleEndedIterator for ListIter<T, U>{
fn next_back(&mut self) -> ::std::option::Option<U> {
if self.size > self.index {
self.size -= 1;
Some(self.list.index_move(self.size))
} else {
None
}
}
}
|
{
let result = self.list.index_move(self.index);
self.index += 1;
Some(result)
}
|
conditional_block
|
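The ListIter in the traits.rs record above is driven entirely by IndexMove: next, nth and next_back all delegate to index_move. A stripped-down sketch of the same pattern, using a hypothetical Squares source rather than a real capnp list reader, to show how a single IndexMove impl yields a full Iterator.

// Hypothetical element source; capnp's real list readers play this role.
trait IndexMove<I, T> {
    fn index_move(&self, index: I) -> T;
}

struct Squares;

impl IndexMove<u32, u64> for Squares {
    fn index_move(&self, index: u32) -> u64 {
        let i = u64::from(index);
        i * i
    }
}

// Simplified ListIter mirroring the shape of the one in traits.rs above.
struct ListIter<T> {
    list: T,
    index: u32,
    size: u32,
}

impl<T: IndexMove<u32, u64>> Iterator for ListIter<T> {
    type Item = u64;
    fn next(&mut self) -> Option<u64> {
        if self.index < self.size {
            let result = self.list.index_move(self.index);
            self.index += 1;
            Some(result)
        } else {
            None
        }
    }
}

fn main() {
    let it = ListIter { list: Squares, index: 0, size: 5 };
    let collected: Vec<u64> = it.collect();
    assert_eq!(collected, vec![0, 1, 4, 9, 16]);
}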
traits.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use crate::{Word, Result};
use crate::private::layout::{CapTable, ListReader, StructReader, StructBuilder, StructSize,
PointerBuilder, PointerReader};
use std::marker::PhantomData;
|
pub trait HasStructSize {
fn struct_size() -> StructSize;
}
pub trait IntoInternalStructReader<'a> {
fn into_internal_struct_reader(self) -> StructReader<'a>;
}
pub trait FromStructBuilder<'a> {
fn new(struct_builder: StructBuilder<'a>) -> Self;
}
pub trait IntoInternalListReader<'a> {
fn into_internal_list_reader(self) -> ListReader<'a>;
}
pub trait FromPointerReader<'a> : Sized {
fn get_from_pointer(reader: &PointerReader<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
/// Associated types hackery that allows us to reason about Cap'n Proto types
/// without needing to give them a lifetime `'a`.
///
/// If `Foo` is a Cap'n Proto struct and `Bar` is a Rust-native struct, then
/// `foo::Reader<'a>` is to `foo::Owned` as `&'a Bar` is to `Bar`, and
/// `foo::Builder<'a>` is to `foo::Owned` as `&'a mut Bar` is to `Bar`.
/// The relationship is formalized by an `impl <'a> capnp::traits::Owned<'a> for foo::Owned`.
/// Because Cap'n Proto struct layout differs from Rust struct layout, a `foo::Owned` value
/// cannot be used for anything interesting on its own; the `foo::Owned` type is useful
/// nonetheless as a type parameter, e.g. for a generic container that owns a Cap'n Proto
/// message of type `T: for<'a> capnp::traits::Owned<'a>`.
pub trait Owned<'a> {
type Reader: FromPointerReader<'a> + SetPointerBuilder<Self::Builder>;
type Builder: FromPointerBuilder<'a>;
}
pub trait OwnedStruct<'a> {
type Reader: FromStructReader<'a> + SetPointerBuilder<Self::Builder> + IntoInternalStructReader<'a>;
type Builder: FromStructBuilder<'a> + HasStructSize;
}
pub trait Pipelined {
type Pipeline;
}
pub trait FromPointerBuilder<'a> : Sized {
fn init_pointer(builder: PointerBuilder<'a>, length: u32) -> Self;
fn get_from_pointer(builder: PointerBuilder<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
pub trait SetPointerBuilder<To> {
fn set_pointer_builder<'a>(builder: PointerBuilder<'a>, from: Self, canonicalize: bool) -> Result<()>;
}
pub trait Imbue<'a> {
fn imbue(&mut self, caps: &'a CapTable);
}
pub trait ImbueMut<'a> {
fn imbue_mut(&mut self, caps: &'a mut CapTable);
}
pub trait HasTypeId {
fn type_id() -> u64;
}
pub trait ToU16 {
fn to_u16(self) -> u16;
}
pub trait FromU16 : Sized {
fn from_u16(value: u16) -> ::std::result::Result<Self, crate::NotInSchema>;
}
pub trait IndexMove<I, T> {
fn index_move(&self, index: I) -> T;
}
pub struct ListIter<T, U> {
marker: PhantomData<U>,
list: T,
index: u32,
size: u32,
}
impl <T, U> ListIter<T, U>{
pub fn new(list: T, size: u32) -> ListIter<T, U> {
ListIter { list: list, index: 0, size: size, marker: PhantomData }
}
}
impl <U, T : IndexMove<u32, U>> ::std::iter::Iterator for ListIter<T, U> {
type Item = U;
fn next(&mut self) -> ::std::option::Option<U> {
if self.index < self.size {
let result = self.list.index_move(self.index);
self.index += 1;
Some(result)
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>){
(self.size as usize, Some(self.size as usize))
}
fn nth(&mut self, p: usize) -> Option<U>{
if p < self.size as usize {
self.index = p as u32;
let result = self.list.index_move(self.index);
Some(result)
} else {
None
}
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::ExactSizeIterator for ListIter<T, U>{
fn len(&self) -> usize{
self.size as usize
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::DoubleEndedIterator for ListIter<T, U>{
fn next_back(&mut self) -> ::std::option::Option<U> {
if self.size > self.index {
self.size -= 1;
Some(self.list.index_move(self.size))
} else {
None
}
}
}
|
pub trait FromStructReader<'a> {
fn new(reader: StructReader<'a>) -> Self;
}
|
random_line_split
|
traits.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use crate::{Word, Result};
use crate::private::layout::{CapTable, ListReader, StructReader, StructBuilder, StructSize,
PointerBuilder, PointerReader};
use std::marker::PhantomData;
pub trait FromStructReader<'a> {
fn new(reader: StructReader<'a>) -> Self;
}
pub trait HasStructSize {
fn struct_size() -> StructSize;
}
pub trait IntoInternalStructReader<'a> {
fn into_internal_struct_reader(self) -> StructReader<'a>;
}
pub trait FromStructBuilder<'a> {
fn new(struct_builder: StructBuilder<'a>) -> Self;
}
pub trait IntoInternalListReader<'a> {
fn into_internal_list_reader(self) -> ListReader<'a>;
}
pub trait FromPointerReader<'a> : Sized {
fn get_from_pointer(reader: &PointerReader<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
/// Associated types hackery that allows us to reason about Cap'n Proto types
/// without needing to give them a lifetime `'a`.
///
/// If `Foo` is a Cap'n Proto struct and `Bar` is a Rust-native struct, then
/// `foo::Reader<'a>` is to `foo::Owned` as `&'a Bar` is to `Bar`, and
/// `foo::Builder<'a>` is to `foo::Owned` as `&'a mut Bar` is to `Bar`.
/// The relationship is formalized by an `impl <'a> capnp::traits::Owned<'a> for foo::Owned`.
/// Because Cap'n Proto struct layout differs from Rust struct layout, a `foo::Owned` value
/// cannot be used for anything interesting on its own; the `foo::Owned` type is useful
/// nonetheless as a type parameter, e.g. for a generic container that owns a Cap'n Proto
/// message of type `T: for<'a> capnp::traits::Owned<'a>`.
pub trait Owned<'a> {
type Reader: FromPointerReader<'a> + SetPointerBuilder<Self::Builder>;
type Builder: FromPointerBuilder<'a>;
}
pub trait OwnedStruct<'a> {
type Reader: FromStructReader<'a> + SetPointerBuilder<Self::Builder> + IntoInternalStructReader<'a>;
type Builder: FromStructBuilder<'a> + HasStructSize;
}
pub trait Pipelined {
type Pipeline;
}
pub trait FromPointerBuilder<'a> : Sized {
fn init_pointer(builder: PointerBuilder<'a>, length: u32) -> Self;
fn get_from_pointer(builder: PointerBuilder<'a>, default: Option<&'a [Word]>) -> Result<Self>;
}
pub trait SetPointerBuilder<To> {
fn set_pointer_builder<'a>(builder: PointerBuilder<'a>, from: Self, canonicalize: bool) -> Result<()>;
}
pub trait Imbue<'a> {
fn imbue(&mut self, caps: &'a CapTable);
}
pub trait ImbueMut<'a> {
fn imbue_mut(&mut self, caps: &'a mut CapTable);
}
pub trait HasTypeId {
fn type_id() -> u64;
}
pub trait ToU16 {
fn to_u16(self) -> u16;
}
pub trait FromU16 : Sized {
fn from_u16(value: u16) -> ::std::result::Result<Self, crate::NotInSchema>;
}
pub trait IndexMove<I, T> {
fn index_move(&self, index: I) -> T;
}
pub struct ListIter<T, U> {
marker: PhantomData<U>,
list: T,
index: u32,
size: u32,
}
impl <T, U> ListIter<T, U>{
pub fn new(list: T, size: u32) -> ListIter<T, U> {
ListIter { list: list, index: 0, size: size, marker: PhantomData }
}
}
impl <U, T : IndexMove<u32, U>> ::std::iter::Iterator for ListIter<T, U> {
type Item = U;
fn next(&mut self) -> ::std::option::Option<U> {
if self.index < self.size {
let result = self.list.index_move(self.index);
self.index += 1;
Some(result)
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>){
(self.size as usize, Some(self.size as usize))
}
fn
|
(&mut self, p: usize) -> Option<U>{
if p < self.size as usize {
self.index = p as u32;
let result = self.list.index_move(self.index);
Some(result)
} else {
None
}
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::ExactSizeIterator for ListIter<T, U>{
fn len(&self) -> usize{
self.size as usize
}
}
impl <U, T: IndexMove<u32, U>> ::std::iter::DoubleEndedIterator for ListIter<T, U>{
fn next_back(&mut self) -> ::std::option::Option<U> {
if self.size > self.index {
self.size -= 1;
Some(self.list.index_move(self.size))
} else {
None
}
}
}
|
nth
|
identifier_name
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common;
use common::config;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
|
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
for iter_header(testfile) |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
};
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
for iter_header(testfile) |ln| {
if parse_name_directive(ln, ~"xfail-test") { return true; }
if parse_name_directive(ln, xfail_target()) { return true; }
if config.mode == common::mode_pretty &&
parse_name_directive(ln, ~"xfail-pretty") { return true; }
};
return false;
fn xfail_target() -> ~str {
~"xfail-" + str::to_owned(os::SYSNAME)
}
}
fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
let rdr = io::file_reader(testfile).get();
while !rdr.eof() {
let ln = rdr.read_line();
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if str::starts_with(ln, ~"fn")
|| str::starts_with(ln, ~"mod") {
return false;
} else { if !(it(ln)) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
do parse_name_value_directive(line, ~"exec-env").map |nv| {
// nv is either FOO or FOO=BAR
let mut strs = ~[];
for str::each_splitn_char(*nv, '=', 1u) |s| { strs.push(s.to_owned()); }
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not %u", n)
}
}
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path(s)),
None => {
if parse_name_directive(line, "pp-exact") {
Some(testfile.file_path())
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
str::contains(line, directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ~":";
match str::find_str(line, keycolon) {
Some(colon) => {
let value = str::slice(line, colon + str::len(keycolon),
str::len(line)).to_owned();
debug!("%s: %s", directive, value);
Some(value)
}
None => None
}
}
|
}
|
random_line_split
|
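The header.rs record above is pre-1.0 Rust (~str, do-expressions, for over &fn closures). For orientation only, a rough modern-Rust sketch of the name: value directive lookup performed by parse_name_value_directive; unlike the original it trims the extracted value, and it is not the current compiletest API.

// Return the value of a "name: value" directive if `line` contains it.
fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
    let keycolon = format!("{}:", directive);
    line.find(&keycolon)
        // Take everything after "name:" and trim surrounding whitespace
        // (a small modernization; the original keeps the raw remainder).
        .map(|idx| line[idx + keycolon.len()..].trim().to_owned())
}

fn main() {
    let line = "// compile-flags: -O --edition 2018";
    assert_eq!(
        parse_name_value_directive(line, "compile-flags"),
        Some("-O --edition 2018".to_owned())
    );
    assert_eq!(parse_name_value_directive(line, "pp-exact"), None);
}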
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common;
use common::config;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
for iter_header(testfile) |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None =>
|
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
};
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
for iter_header(testfile) |ln| {
if parse_name_directive(ln, ~"xfail-test") { return true; }
if parse_name_directive(ln, xfail_target()) { return true; }
if config.mode == common::mode_pretty &&
parse_name_directive(ln, ~"xfail-pretty") { return true; }
};
return false;
fn xfail_target() -> ~str {
~"xfail-" + str::to_owned(os::SYSNAME)
}
}
fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
let rdr = io::file_reader(testfile).get();
while !rdr.eof() {
let ln = rdr.read_line();
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if str::starts_with(ln, ~"fn")
|| str::starts_with(ln, ~"mod") {
return false;
} else { if !(it(ln)) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
do parse_name_value_directive(line, ~"exec-env").map |nv| {
// nv is either FOO or FOO=BAR
let mut strs = ~[];
for str::each_splitn_char(*nv, '=', 1u) |s| { strs.push(s.to_owned()); }
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not %u", n)
}
}
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path(s)),
None => {
if parse_name_directive(line, "pp-exact") {
Some(testfile.file_path())
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
str::contains(line, directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ~":";
match str::find_str(line, keycolon) {
Some(colon) => {
let value = str::slice(line, colon + str::len(keycolon),
str::len(line)).to_owned();
debug!("%s: %s", directive, value);
Some(value)
}
None => None
}
}
|
{}
|
conditional_block
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common;
use common::config;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
for iter_header(testfile) |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
};
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
for iter_header(testfile) |ln| {
if parse_name_directive(ln, ~"xfail-test") { return true; }
if parse_name_directive(ln, xfail_target()) { return true; }
if config.mode == common::mode_pretty &&
parse_name_directive(ln, ~"xfail-pretty") { return true; }
};
return false;
fn xfail_target() -> ~str {
~"xfail-" + str::to_owned(os::SYSNAME)
}
}
fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
let rdr = io::file_reader(testfile).get();
while !rdr.eof() {
let ln = rdr.read_line();
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if str::starts_with(ln, ~"fn")
|| str::starts_with(ln, ~"mod") {
return false;
} else { if !(it(ln)) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
do parse_name_value_directive(line, ~"exec-env").map |nv| {
// nv is either FOO or FOO=BAR
let mut strs = ~[];
for str::each_splitn_char(*nv, '=', 1u) |s| { strs.push(s.to_owned()); }
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not %u", n)
}
}
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path>
|
fn parse_name_directive(line: &str, directive: &str) -> bool {
str::contains(line, directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ~":";
match str::find_str(line, keycolon) {
Some(colon) => {
let value = str::slice(line, colon + str::len(keycolon),
str::len(line)).to_owned();
debug!("%s: %s", directive, value);
Some(value)
}
None => None
}
}
|
{
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path(s)),
None => {
if parse_name_directive(line, "pp-exact") {
Some(testfile.file_path())
} else {
None
}
}
}
}
|
identifier_body
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common;
use common::config;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn
|
(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
for iter_header(testfile) |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
};
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
for iter_header(testfile) |ln| {
if parse_name_directive(ln, ~"xfail-test") { return true; }
if parse_name_directive(ln, xfail_target()) { return true; }
if config.mode == common::mode_pretty &&
parse_name_directive(ln, ~"xfail-pretty") { return true; }
};
return false;
fn xfail_target() -> ~str {
~"xfail-" + str::to_owned(os::SYSNAME)
}
}
fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
let rdr = io::file_reader(testfile).get();
while !rdr.eof() {
let ln = rdr.read_line();
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if str::starts_with(ln, ~"fn")
|| str::starts_with(ln, ~"mod") {
return false;
} else { if !(it(ln)) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
do parse_name_value_directive(line, ~"exec-env").map |nv| {
// nv is either FOO or FOO=BAR
let mut strs = ~[];
for str::each_splitn_char(*nv, '=', 1u) |s| { strs.push(s.to_owned()); }
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not %u", n)
}
}
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path(s)),
None => {
if parse_name_directive(line, "pp-exact") {
Some(testfile.file_path())
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
str::contains(line, directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ~":";
match str::find_str(line, keycolon) {
Some(colon) => {
let value = str::slice(line, colon + str::len(keycolon),
str::len(line)).to_owned();
debug!("%s: %s", directive, value);
Some(value)
}
None => None
}
}
|
load_props
|
identifier_name
|
lib.rs
|
//! Kleene logic within Rust's type system
//!
//! Values are `True`, `False` and `Unknown`. Operations are `Not`, `BitAnd`
//! and `BitOr` from `std::ops`. There is also the `Ternary` enum which
//! represents the values at runtime and the `ToTernary` trait that adds the
//! `to_ternary()` methods to our value types.
//!
//! Examples:
//!
//! `Same` and `Not`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same};
//!# use std::ops::Not;
//! type NotTrue = <<True as Not>::Output as Same<False>>::Output;
//! type NotFalse = <<False as Not>::Output as Same<True>>::Output;
//! type NotUnknown = <<Unknown as Not>::Output as Same<Unknown>>::Output;
//! ```
//!
//! Transforming Values to Runtime, `BitAnd` and `BitOr`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same, Ternary, ToTernary};
//!# use std::ops::{BitAnd, BitOr};
//! assert_eq!(Ternary::T, <True as BitOr<<Unknown as BitAnd<False>>::Output>>::Output::to_ternary());
//! ```
#[deny(missing_docs)]
use std::ops::{Not, BitAnd, BitOr};
/// Our True type value
pub enum True {}
/// Our False type value
pub enum False {}
/// Our Unknown type value
pub enum Unknown {}
/// runtime representation
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Ternary {
/// A True-ish value
T,
/// An Unknown value
U,
/// A False-ish value
F,
}
/// conversion to runtime enum
///
/// examples:
/// ```
///# use ternary::{True, False, Unknown, Ternary, ToTernary};
/// assert_eq!(True::to_ternary(), Ternary::T);
/// assert_eq!(False::to_ternary(), Ternary::F);
/// assert_eq!(Unknown::to_ternary(), Ternary::U);
/// ```
pub trait ToTernary {
fn to_ternary() -> Ternary;
}
impl ToTernary for True {
#[inline] fn to_ternary() -> Ternary { Ternary::T }
}
impl ToTernary for False {
#[inline] fn to_ternary() -> Ternary { Ternary::F }
}
impl ToTernary for Unknown {
#[inline] fn to_ternary() -> Ternary { Ternary::U }
}
/// Not
/// !True == False
impl Not for True {
type Output = False;
fn not(self) -> Self::Output { match self {} }
}
/// !False == True
impl Not for False {
type Output = True;
fn not(self) -> Self::Output { match self {} }
}
/// !Unknown == Unknown
impl Not for Unknown {
type Output = Unknown;
fn not(self) -> Self::Output { match self {} }
}
/// BitAnd
/// True & X == X
impl<X: ToTernary> BitAnd<X> for True {
type Output = X;
fn bitand(self, _: X) -> Self::Output { match self {} }
}
/// False & X == False
impl<X: ToTernary> BitAnd<X> for False {
type Output = False;
fn bitand(self, _: X) -> Self::Output { match self {} }
}
/// Unknown & True == Unknown
impl BitAnd<True> for Unknown {
type Output = Unknown;
fn bitand(self, _: True) -> Self::Output { match self {} }
}
/// Unknown & Unknown == Unknown
impl BitAnd<Unknown> for Unknown {
type Output = Unknown;
fn bitand(self, _: Unknown) -> Self::Output
|
}
/// Unknown & False == False
impl BitAnd<False> for Unknown {
type Output = False;
fn bitand(self, _: False) -> Self::Output { match self {} }
}
/// BitOr
/// True | X == True
impl<X: ToTernary> BitOr<X> for True {
type Output = True;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// False | X == X
impl<X: ToTernary> BitOr<X> for False {
type Output = X;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// Unknown | True == True
impl BitOr<True> for Unknown {
type Output = True;
fn bitor(self, _: True) -> Self::Output { match self {} }
}
/// Unknown | Unknown == Unknown
impl BitOr<Unknown> for Unknown {
type Output = Unknown;
fn bitor(self, _: Unknown) -> Self::Output { match self {} }
}
/// Unknown | False == Unknown
impl BitOr<False> for Unknown {
type Output = Unknown;
fn bitor(self, _: False) -> Self::Output { match self {} }
}
/// shamelessly copied from typenum
pub trait Same<Rhs = Self> {
type Output;
}
impl<T> Same<T> for T {
type Output = T;
}
|
{ match self {} }
|
identifier_body
|
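A small usage sketch of the type-level operators in the lib.rs record above, assuming the crate is importable as ternary exactly as in its own doc examples. The whole expression Unknown & (True | !False) is evaluated in the type system; to_ternary() only reports the result at runtime.

use std::ops::{BitAnd, BitOr, Not};
use ternary::{False, Same, Ternary, ToTernary, True, Unknown};

// Unknown & (True | !False), spelled out as associated-type projections.
type Verdict = <Unknown as BitAnd<<True as BitOr<<False as Not>::Output>>::Output>>::Output;

fn main() {
    // !False == True, True | True == True, Unknown & True == Unknown.
    assert_eq!(Verdict::to_ternary(), Ternary::U);
    // Same<Unknown> only holds if Verdict really is Unknown, so this line
    // doubles as a compile-time check.
    let _witness: <Verdict as Same<Unknown>>::Output;
}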
lib.rs
|
//! Kleene logic within Rust's type system
//!
//! Values are `True`, `False` and `Unknown`. Operations are `Not`, `BitAnd`
//! and `BitOr` from `std::ops`. There is also the `Ternary` enum which
//! represents the values at runtime and the `ToTernary` trait that adds the
//! `to_ternary()` methods to our value types.
//!
//! Examples:
//!
//! `Same` and `Not`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same};
//!# use std::ops::Not;
//! type NotTrue = <<True as Not>::Output as Same<False>>::Output;
//! type NotFalse = <<False as Not>::Output as Same<True>>::Output;
//! type NotUnknown = <<Unknown as Not>::Output as Same<Unknown>>::Output;
//! ```
//!
//! Transforming Values to Runtime, `BitAnd` and `BitOr`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same, Ternary, ToTernary};
//!# use std::ops::{BitAnd, BitOr};
//! assert_eq!(Ternary::T, <True as BitOr<<Unknown as BitAnd<False>>::Output>>::Output::to_ternary());
//! ```
#[deny(missing_docs)]
use std::ops::{Not, BitAnd, BitOr};
/// Our True type value
pub enum True {}
/// Our False type value
pub enum False {}
/// Our Unknown type value
pub enum Unknown {}
/// runtime representation
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Ternary {
/// A True-ish value
T,
/// An Unknown value
U,
/// A False-ish value
F,
}
/// conversion to runtime enum
///
/// examples:
/// ```
///# use ternary::{True, False, Unknown, Ternary, ToTernary};
/// assert_eq!(True::to_ternary(), Ternary::T);
/// assert_eq!(False::to_ternary(), Ternary::F);
/// assert_eq!(Unknown::to_ternary(), Ternary::U);
/// ```
pub trait ToTernary {
fn to_ternary() -> Ternary;
}
impl ToTernary for True {
#[inline] fn to_ternary() -> Ternary { Ternary::T }
}
impl ToTernary for False {
#[inline] fn to_ternary() -> Ternary { Ternary::F }
}
impl ToTernary for Unknown {
#[inline] fn to_ternary() -> Ternary { Ternary::U }
}
/// Not
/// !True == False
impl Not for True {
type Output = False;
fn not(self) -> Self::Output { match self {} }
}
/// !False == True
impl Not for False {
type Output = True;
fn not(self) -> Self::Output { match self {} }
}
/// !Unknown == Unknown
impl Not for Unknown {
type Output = Unknown;
fn not(self) -> Self::Output { match self {} }
}
/// BitAnd
/// True & X == X
impl<X: ToTernary> BitAnd<X> for True {
type Output = X;
fn bitand(self, _: X) -> Self::Output { match self {} }
}
/// False & X == False
impl<X: ToTernary> BitAnd<X> for False {
type Output = False;
fn bitand(self, _: X) -> Self::Output { match self {} }
}
|
impl BitAnd<True> for Unknown {
type Output = Unknown;
fn bitand(self, _: True) -> Self::Output { match self {} }
}
/// Unknown & Unknown == Unknown
impl BitAnd<Unknown> for Unknown {
type Output = Unknown;
fn bitand(self, _: Unknown) -> Self::Output { match self {} }
}
/// Unknown & False == False
impl BitAnd<False> for Unknown {
type Output = False;
fn bitand(self, _: False) -> Self::Output { match self {} }
}
/// BitOr
/// True | X == True
impl<X: ToTernary> BitOr<X> for True {
type Output = True;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// False | X == X
impl<X: ToTernary> BitOr<X> for False {
type Output = X;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// Unknown | True == True
impl BitOr<True> for Unknown {
type Output = True;
fn bitor(self, _: True) -> Self::Output { match self {} }
}
/// Unknown | Unknown == Unknown
impl BitOr<Unknown> for Unknown {
type Output = Unknown;
fn bitor(self, _: Unknown) -> Self::Output { match self {} }
}
/// Unknown | False == Unknown
impl BitOr<False> for Unknown {
type Output = Unknown;
fn bitor(self, _: False) -> Self::Output { match self {} }
}
/// shamelessly copied from typenum
pub trait Same<Rhs = Self> {
type Output;
}
impl<T> Same<T> for T {
type Output = T;
}
|
/// Unknown & True == Unknown
|
random_line_split
|
lib.rs
|
//! Kleene logic within Rust's type system
//!
//! Values are `True`, `False` and `Unknown`. Operations are `Not`, `BitAnd`
//! and `BitOr` from `std::ops`. There is also the `Ternary` enum which
//! represents the values at runtime and the `ToTernary` trait that adds the
//! `to_ternary()` methods to our value types.
//!
//! Examples:
//!
//! `Same` and `Not`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same};
//!# use std::ops::Not;
//! type NotTrue = <<True as Not>::Output as Same<False>>::Output;
//! type NotFalse = <<False as Not>::Output as Same<True>>::Output;
//! type NotUnknown = <<Unknown as Not>::Output as Same<Unknown>>::Output;
//! ```
//!
//! Transforming Values to Runtime, `BitAnd` and `BitOr`
//!
//! ```
//!# use ternary::{True, False, Unknown, Same, Ternary, ToTernary};
//!# use std::ops::{BitAnd, BitOr};
//! assert_eq!(Ternary::T, <True as BitOr<<Unknown as BitAnd<False>>::Output>>::Output::to_ternary());
//! ```
#[deny(missing_docs)]
use std::ops::{Not, BitAnd, BitOr};
/// Our True type value
pub enum True {}
/// Our False type value
pub enum False {}
/// Our Unknown type value
pub enum Unknown {}
/// runtime representation
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Ternary {
/// A True-ish value
T,
/// An Unknown value
U,
/// A False-ish value
F,
}
/// conversion to runtime enum
///
/// examples:
/// ```
///# use ternary::{True, False, Unknown, Ternary, ToTernary};
/// assert_eq!(True::to_ternary(), Ternary::T);
/// assert_eq!(False::to_ternary(), Ternary::F);
/// assert_eq!(Unknown::to_ternary(), Ternary::U);
/// ```
pub trait ToTernary {
fn to_ternary() -> Ternary;
}
impl ToTernary for True {
#[inline] fn to_ternary() -> Ternary { Ternary::T }
}
impl ToTernary for False {
#[inline] fn to_ternary() -> Ternary { Ternary::F }
}
impl ToTernary for Unknown {
#[inline] fn to_ternary() -> Ternary { Ternary::U }
}
/// Not
/// !True == False
impl Not for True {
type Output = False;
fn not(self) -> Self::Output { match self {} }
}
/// !False == True
impl Not for False {
type Output = True;
fn not(self) -> Self::Output { match self {} }
}
/// !Unknown == Unknown
impl Not for Unknown {
type Output = Unknown;
fn not(self) -> Self::Output { match self {} }
}
/// BitAnd
/// True & X == X
impl<X: ToTernary> BitAnd<X> for True {
type Output = X;
fn bitand(self, _: X) -> Self::Output { match self {} }
}
/// False & X == False
impl<X: ToTernary> BitAnd<X> for False {
type Output = False;
fn
|
(self, _: X) -> Self::Output { match self {} }
}
/// Unknown & True == Unknown
impl BitAnd<True> for Unknown {
type Output = Unknown;
fn bitand(self, _: True) -> Self::Output { match self {} }
}
/// Unknown & Unknown == Unknown
impl BitAnd<Unknown> for Unknown {
type Output = Unknown;
fn bitand(self, _: Unknown) -> Self::Output { match self {} }
}
/// Unknown & False == False
impl BitAnd<False> for Unknown {
type Output = False;
fn bitand(self, _: False) -> Self::Output { match self {} }
}
/// BitOr
/// True | X == True
impl<X: ToTernary> BitOr<X> for True {
type Output = True;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// False | X == X
impl<X: ToTernary> BitOr<X> for False {
type Output = X;
fn bitor(self, _: X) -> Self::Output { match self {} }
}
/// Unknown | True == True
impl BitOr<True> for Unknown {
type Output = True;
fn bitor(self, _: True) -> Self::Output { match self {} }
}
/// Unknown | Unknown == Unknown
impl BitOr<Unknown> for Unknown {
type Output = Unknown;
fn bitor(self, _: Unknown) -> Self::Output { match self {} }
}
/// Unknown | False == Unknown
impl BitOr<False> for Unknown {
type Output = Unknown;
fn bitor(self, _: False) -> Self::Output { match self {} }
}
/// shamelessly copied from typenum
pub trait Same<Rhs = Self> {
type Output;
}
impl<T> Same<T> for T {
type Output = T;
}
|
bitand
|
identifier_name
|
part1.rs
|
// adventofcode - day 15
// part 1
use std::io::prelude::*;
use std::fs::File;
struct Ingredient {
#[allow(dead_code)]
name: String,
capacity: i32,
durability: i32,
flavour: i32,
texture: i32,
#[allow(dead_code)]
calories: i32,
}
fn main(){
println!("Advent of Code - day 15 | part 1");
// import data
let data = import_data();
let mut ingredients = Vec::new();
for line in data.lines(){
ingredients.push( parse_line(line) );
}
let mut teaspoons = Vec::with_capacity(ingredients.len());
for _ in 0..ingredients.len(){
teaspoons.push(0);
}
let mut max_score = 0;
for ii in 0..101 {
teaspoons[0] = ii;
for jj in 0.. 101 - ii {
teaspoons[1] = jj;
for kk in 0.. 101 - (ii + jj) {
teaspoons[2] = kk;
let ll = 100 - (ii + kk + jj);
teaspoons[3] = ll;
let score = calculate_recipe(&ingredients, &teaspoons);
if score > max_score {
max_score = score;
}
}
}
}
println!("Maximal score: {}", max_score);
|
}
fn calculate_recipe(ingredients: &Vec<Ingredient>, teaspoons: &Vec<i32>) -> i32{
let mut capacity = 0;
let mut durability = 0;
let mut flavour = 0;
let mut texture = 0;
for ii in 0..ingredients.len() {
capacity += ingredients[ii].capacity * teaspoons[ii];
durability += ingredients[ii].durability * teaspoons[ii];
flavour += ingredients[ii].flavour * teaspoons[ii];
texture += ingredients[ii].texture * teaspoons[ii];
}
if capacity <= 0 || durability <= 0 || flavour <= 0 || texture <= 0 {
return 0;
}
capacity * durability * flavour * texture
}
fn parse_line(line: &str) -> Ingredient {
let properties = line.split(": capacity ")
.map(|s| s.parse::<String>().unwrap())
.collect::<Vec<String>>();
let name = properties[0].clone();
let properties = properties[1].split(", durability ")
.flat_map(|s| s.split(", flavor "))
.flat_map(|s| s.split(", texture "))
.flat_map(|s| s.split(", calories "))
.map(|s| s.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
let capacity = properties[0];
let durability = properties[1];
let flavour = properties[2];
let texture = properties[3];
let calories = properties[4];
Ingredient{ name: name,
capacity: capacity,
durability: durability,
flavour: flavour,
texture: texture,
calories: calories}
}
// This function simply imports the data set from a file called input.txt
fn import_data() -> String {
let mut file = match File::open("../../inputs/15.txt") {
Ok(f) => f,
Err(e) => panic!("file error: {}", e),
};
let mut data = String::new();
match file.read_to_string(&mut data){
Ok(_) => {},
Err(e) => panic!("file error: {}", e),
};
data
}
|
random_line_split
|
|
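The part1.rs record above scores a recipe as the product of four teaspoon-weighted property totals, returning zero as soon as any total is non-positive. A compact sketch of the same rule for a made-up two-ingredient table (the numbers below are illustrative, not the puzzle input), enumerating every 100-teaspoon split.

// Property rows: (capacity, durability, flavour, texture) per ingredient.
// Invented values for the example only.
const INGREDIENTS: [(i32, i32, i32, i32); 2] = [(-1, -2, 6, 3), (2, 3, -2, -1)];

fn score(teaspoons: &[i32]) -> i32 {
    let mut totals = [0i32; 4];
    for (ing, &spoons) in INGREDIENTS.iter().zip(teaspoons) {
        totals[0] += ing.0 * spoons;
        totals[1] += ing.1 * spoons;
        totals[2] += ing.2 * spoons;
        totals[3] += ing.3 * spoons;
    }
    // Any non-positive property total zeroes the whole recipe.
    if totals.iter().any(|&t| t <= 0) {
        return 0;
    }
    totals.iter().product()
}

fn main() {
    // Try every way of splitting 100 teaspoons across the two ingredients.
    let best = (0..=100).map(|a| score(&[a, 100 - a])).max().unwrap();
    println!("best score: {}", best);
}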
part1.rs
|
// adventofcode - day 15
// part 1
use std::io::prelude::*;
use std::fs::File;
struct Ingredient {
#[allow(dead_code)]
name: String,
capacity: i32,
durability: i32,
flavour: i32,
texture: i32,
#[allow(dead_code)]
calories: i32,
}
fn main(){
println!("Advent of Code - day 15 | part 1");
// import data
let data = import_data();
let mut ingredients = Vec::new();
for line in data.lines(){
ingredients.push( parse_line(line) );
}
let mut teaspoons = Vec::with_capacity(ingredients.len());
for _ in 0..ingredients.len(){
teaspoons.push(0);
}
let mut max_score = 0;
for ii in 0..101 {
teaspoons[0] = ii;
for jj in 0.. 101 - ii {
teaspoons[1] = jj;
for kk in 0.. 101 - (ii + jj) {
teaspoons[2] = kk;
let ll = 100 - (ii + kk + jj);
teaspoons[3] = ll;
let score = calculate_recipe(&ingredients, &teaspoons);
if score > max_score {
max_score = score;
}
}
}
}
println!("Maximal score: {}", max_score);
}
fn calculate_recipe(ingredients: &Vec<Ingredient>, teaspoons: &Vec<i32>) -> i32{
let mut capacity = 0;
let mut durability = 0;
let mut flavour = 0;
let mut texture = 0;
for ii in 0..ingredients.len() {
capacity += ingredients[ii].capacity * teaspoons[ii];
durability += ingredients[ii].durability * teaspoons[ii];
flavour += ingredients[ii].flavour * teaspoons[ii];
texture += ingredients[ii].texture * teaspoons[ii];
}
if capacity <= 0 || durability <= 0 || flavour <= 0 || texture <= 0 {
return 0;
}
capacity * durability * flavour * texture
}
fn parse_line(line: &str) -> Ingredient {
let properties = line.split(": capacity ")
.map(|s| s.parse::<String>().unwrap())
.collect::<Vec<String>>();
let name = properties[0].clone();
let properties = properties[1].split(", durability ")
.flat_map(|s| s.split(", flavor "))
.flat_map(|s| s.split(", texture "))
.flat_map(|s| s.split(", calories "))
.map(|s| s.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
let capacity = properties[0];
let durability = properties[1];
let flavour = properties[2];
let texture = properties[3];
let calories = properties[4];
Ingredient{ name: name,
capacity: capacity,
durability: durability,
flavour: flavour,
texture: texture,
calories: calories}
}
// This function simply imports the data set from a file called input.txt
fn import_data() -> String
|
{
let mut file = match File::open("../../inputs/15.txt") {
Ok(f) => f,
Err(e) => panic!("file error: {}", e),
};
let mut data = String::new();
match file.read_to_string(&mut data){
Ok(_) => {},
Err(e) => panic!("file error: {}", e),
};
data
}
|
identifier_body
|
|
part1.rs
|
// adventofcode - day 15
// part 1
use std::io::prelude::*;
use std::fs::File;
struct Ingredient {
#[allow(dead_code)]
name: String,
capacity: i32,
durability: i32,
flavour: i32,
texture: i32,
#[allow(dead_code)]
calories: i32,
}
fn main(){
println!("Advent of Code - day 15 | part 1");
// import data
let data = import_data();
let mut ingredients = Vec::new();
for line in data.lines(){
ingredients.push( parse_line(line) );
}
let mut teaspoons = Vec::with_capacity(ingredients.len());
for _ in 0..ingredients.len(){
teaspoons.push(0);
}
let mut max_score = 0;
for ii in 0..101 {
teaspoons[0] = ii;
for jj in 0.. 101 - ii {
teaspoons[1] = jj;
for kk in 0.. 101 - (ii + jj) {
teaspoons[2] = kk;
let ll = 100 - (ii + kk + jj);
teaspoons[3] = ll;
let score = calculate_recipe(&ingredients, &teaspoons);
if score > max_score {
max_score = score;
}
}
}
}
println!("Maximal score: {}", max_score);
}
fn calculate_recipe(ingredients: &Vec<Ingredient>, teaspoons: &Vec<i32>) -> i32{
let mut capacity = 0;
let mut durability = 0;
let mut flavour = 0;
let mut texture = 0;
for ii in 0..ingredients.len() {
capacity += ingredients[ii].capacity * teaspoons[ii];
durability += ingredients[ii].durability * teaspoons[ii];
flavour += ingredients[ii].flavour * teaspoons[ii];
texture += ingredients[ii].texture * teaspoons[ii];
}
if capacity <= 0 || durability <= 0 || flavour <= 0 || texture <= 0 {
return 0;
}
capacity * durability * flavour * texture
}
fn
|
(line: &str) -> Ingredient {
let properties = line.split(": capacity ")
.map(|s| s.parse::<String>().unwrap())
.collect::<Vec<String>>();
let name = properties[0].clone();
let properties = properties[1].split(", durability ")
.flat_map(|s| s.split(", flavor "))
.flat_map(|s| s.split(", texture "))
.flat_map(|s| s.split(", calories "))
.map(|s| s.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
let capacity = properties[0];
let durability = properties[1];
let flavour = properties[2];
let texture = properties[3];
let calories = properties[4];
Ingredient{ name: name,
capacity: capacity,
durability: durability,
flavour: flavour,
texture: texture,
calories: calories}
}
// This function simply imports the data set from the file ../../inputs/15.txt
fn import_data() -> String {
let mut file = match File::open("../../inputs/15.txt") {
Ok(f) => f,
Err(e) => panic!("file error: {}", e),
};
let mut data = String::new();
match file.read_to_string(&mut data){
Ok(_) => {},
Err(e) => panic!("file error: {}", e),
};
data
}
|
parse_line
|
identifier_name
|
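// A compact sketch of the search part1.rs performs: try every split of 100
// teaspoons across the ingredients and score it with the same rule (any
// non-positive property total gives a zero score, otherwise the totals are
// multiplied). The ingredient property values below are made up for
// illustration, not the puzzle input.
fn best_score(ingredients: &[[i32; 4]], spoons_left: i32, totals: [i32; 4]) -> i32 {
    if ingredients.len() == 1 {
        // The last ingredient takes whatever teaspoons remain.
        let mut t = totals;
        for (acc, prop) in t.iter_mut().zip(ingredients[0].iter()) {
            *acc += prop * spoons_left;
        }
        return if t.iter().any(|&v| v <= 0) { 0 } else { t.iter().product() };
    }
    (0..=spoons_left)
        .map(|n| {
            let mut t = totals;
            for (acc, prop) in t.iter_mut().zip(ingredients[0].iter()) {
                *acc += prop * n;
            }
            best_score(&ingredients[1..], spoons_left - n, t)
        })
        .max()
        .unwrap_or(0)
}

fn main() {
    // capacity, durability, flavour, texture for four hypothetical ingredients
    let ingredients = [
        [-1, -2, 6, 3],
        [2, 3, -2, -1],
        [6, -1, 0, 0],
        [1, 3, 0, 0],
    ];
    println!("max score: {}", best_score(&ingredients, 100, [0; 4]));
}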
issue-50814-2.rs
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait C {
const BOO: usize;
}
trait Foo<T> {
const BAR: usize;
}
struct A<T>(T);
impl<T: C> Foo<T> for A<T> {
const BAR: usize = [5, 6, 7][T::BOO]; //~ ERROR any use of this value will cause an error
}
fn foo<T: C>() -> &'static usize {
&<A<T> as Foo<T>>::BAR //~ ERROR E0080
}
impl C for () {
const BOO: usize = 42;
}
impl C for u32 {
const BOO: usize = 1;
}
fn main() {
println!("{:x}", foo::<()>() as *const usize as usize);
println!("{:x}", foo::<u32>() as *const usize as usize);
println!("{:x}", foo::<()>());
println!("{:x}", foo::<u32>());
}
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
random_line_split
|
issue-50814-2.rs
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait C {
const BOO: usize;
}
trait Foo<T> {
const BAR: usize;
}
struct A<T>(T);
impl<T: C> Foo<T> for A<T> {
const BAR: usize = [5, 6, 7][T::BOO]; //~ ERROR any use of this value will cause an error
}
fn
|
<T: C>() -> &'static usize {
&<A<T> as Foo<T>>::BAR //~ ERROR E0080
}
impl C for () {
const BOO: usize = 42;
}
impl C for u32 {
const BOO: usize = 1;
}
fn main() {
println!("{:x}", foo::<()>() as *const usize as usize);
println!("{:x}", foo::<u32>() as *const usize as usize);
println!("{:x}", foo::<()>());
println!("{:x}", foo::<u32>());
}
|
foo
|
identifier_name
|
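// The test above deliberately indexes out of bounds ([5, 6, 7][42] for the ()
// impl) so that monomorphization-time const evaluation fails. A minimal
// in-bounds variant, shown only to illustrate the same shape compiling and
// running; it is not part of the rustc test suite.
trait C {
    const BOO: usize;
}
trait Foo<T> {
    const BAR: usize;
}
#[allow(dead_code)]
struct A<T>(T);
impl<T: C> Foo<T> for A<T> {
    // Reducing the index modulo the array length keeps evaluation in bounds
    // for every impl of C below.
    const BAR: usize = [5, 6, 7][T::BOO % 3];
}
impl C for () {
    const BOO: usize = 42;
}
impl C for u32 {
    const BOO: usize = 1;
}
fn main() {
    println!("{}", <A<()> as Foo<()>>::BAR);   // 42 % 3 == 0 -> 5
    println!("{}", <A<u32> as Foo<u32>>::BAR); // 1 % 3 == 1 -> 6
}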
arrow.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use ArrowType;
use Buildable;
use Misc;
use ShadowType;
use Widget;
use ffi;
use glib::StaticType;
use glib::Value;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect_raw;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib_wrapper! {
pub struct Arrow(Object<ffi::GtkArrow, ffi::GtkArrowClass, ArrowClass>) @extends Misc, Widget, @implements Buildable;
match fn {
get_type => || ffi::gtk_arrow_get_type(),
}
}
pub const NONE_ARROW: Option<&Arrow> = None;
pub trait ArrowExt:'static {
fn get_property_arrow_type(&self) -> ArrowType;
fn set_property_arrow_type(&self, arrow_type: ArrowType);
fn get_property_shadow_type(&self) -> ShadowType;
fn set_property_shadow_type(&self, shadow_type: ShadowType);
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_shadow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<Arrow>> ArrowExt for O {
fn get_property_arrow_type(&self) -> ArrowType {
unsafe {
let mut value = Value::from_type(<ArrowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_arrow_type(&self, arrow_type: ArrowType) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, Value::from(&arrow_type).to_glib_none().0);
}
}
fn get_property_shadow_type(&self) -> ShadowType {
unsafe {
let mut value = Value::from_type(<ShadowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_shadow_type(&self, shadow_type: ShadowType)
|
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::arrow-type\0".as_ptr() as *const _,
Some(transmute(notify_arrow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
fn connect_property_shadow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::shadow-type\0".as_ptr() as *const _,
Some(transmute(notify_shadow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
}
unsafe extern "C" fn notify_arrow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_shadow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
impl fmt::Display for Arrow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arrow")
}
}
|
{
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, Value::from(&shadow_type).to_glib_none().0);
}
}
|
identifier_body
|
arrow.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use ArrowType;
use Buildable;
use Misc;
use ShadowType;
use Widget;
use ffi;
use glib::StaticType;
use glib::Value;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect_raw;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib_wrapper! {
pub struct Arrow(Object<ffi::GtkArrow, ffi::GtkArrowClass, ArrowClass>) @extends Misc, Widget, @implements Buildable;
match fn {
get_type => || ffi::gtk_arrow_get_type(),
}
}
pub const NONE_ARROW: Option<&Arrow> = None;
pub trait ArrowExt:'static {
fn get_property_arrow_type(&self) -> ArrowType;
fn set_property_arrow_type(&self, arrow_type: ArrowType);
fn get_property_shadow_type(&self) -> ShadowType;
fn set_property_shadow_type(&self, shadow_type: ShadowType);
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_shadow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<Arrow>> ArrowExt for O {
fn get_property_arrow_type(&self) -> ArrowType {
unsafe {
let mut value = Value::from_type(<ArrowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_arrow_type(&self, arrow_type: ArrowType) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, Value::from(&arrow_type).to_glib_none().0);
|
}
fn get_property_shadow_type(&self) -> ShadowType {
unsafe {
let mut value = Value::from_type(<ShadowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_shadow_type(&self, shadow_type: ShadowType) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, Value::from(&shadow_type).to_glib_none().0);
}
}
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::arrow-type\0".as_ptr() as *const _,
Some(transmute(notify_arrow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
fn connect_property_shadow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::shadow-type\0".as_ptr() as *const _,
Some(transmute(notify_shadow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
}
unsafe extern "C" fn notify_arrow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_shadow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
impl fmt::Display for Arrow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arrow")
}
}
|
}
|
random_line_split
|
arrow.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use ArrowType;
use Buildable;
use Misc;
use ShadowType;
use Widget;
use ffi;
use glib::StaticType;
use glib::Value;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect_raw;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib_wrapper! {
pub struct Arrow(Object<ffi::GtkArrow, ffi::GtkArrowClass, ArrowClass>) @extends Misc, Widget, @implements Buildable;
match fn {
get_type => || ffi::gtk_arrow_get_type(),
}
}
pub const NONE_ARROW: Option<&Arrow> = None;
pub trait ArrowExt:'static {
fn get_property_arrow_type(&self) -> ArrowType;
fn set_property_arrow_type(&self, arrow_type: ArrowType);
fn get_property_shadow_type(&self) -> ShadowType;
fn set_property_shadow_type(&self, shadow_type: ShadowType);
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_shadow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<Arrow>> ArrowExt for O {
fn get_property_arrow_type(&self) -> ArrowType {
unsafe {
let mut value = Value::from_type(<ArrowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_arrow_type(&self, arrow_type: ArrowType) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"arrow-type\0".as_ptr() as *const _, Value::from(&arrow_type).to_glib_none().0);
}
}
fn get_property_shadow_type(&self) -> ShadowType {
unsafe {
let mut value = Value::from_type(<ShadowType as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_shadow_type(&self, shadow_type: ShadowType) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"shadow-type\0".as_ptr() as *const _, Value::from(&shadow_type).to_glib_none().0);
}
}
fn connect_property_arrow_type_notify<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::arrow-type\0".as_ptr() as *const _,
Some(transmute(notify_arrow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
fn
|
<F: Fn(&Self) +'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::shadow-type\0".as_ptr() as *const _,
Some(transmute(notify_shadow_type_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
}
unsafe extern "C" fn notify_arrow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_shadow_type_trampoline<P, F: Fn(&P) +'static>(this: *mut ffi::GtkArrow, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Arrow> {
let f: &F = transmute(f);
f(&Arrow::from_glib_borrow(this).unsafe_cast())
}
impl fmt::Display for Arrow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arrow")
}
}
|
connect_property_shadow_type_notify
|
identifier_name
|
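// A minimal, self-contained sketch of the boxed-closure "trampoline" pattern
// the generated bindings above rely on: the Rust closure is boxed, handed to C
// as an opaque pointer, and recovered inside an `extern "C"` callback. The
// callback is invoked directly here instead of by GLib, and the manual drop
// stands in for the GDestroyNotify the real bindings install; none of this is
// actual gtk-rs or GLib API.
use std::os::raw::c_void;

unsafe extern "C" fn trampoline(value: i32, user_data: *mut c_void) {
    // Recover the boxed closure from the opaque user-data pointer.
    let f = &*(user_data as *const Box<dyn Fn(i32)>);
    f(value);
}

fn main() {
    let closure: Box<dyn Fn(i32)> = Box::new(|v| println!("notified: {}", v));
    // Double-box so the pointer handed to C is thin.
    let data = Box::into_raw(Box::new(closure)) as *mut c_void;
    // Pretend the C side fires the signal:
    unsafe { trampoline(7, data) };
    // Free the user data by hand (the bindings would do this via GDestroyNotify).
    unsafe { drop(Box::from_raw(data as *mut Box<dyn Fn(i32)>)) };
}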
package.rs
|
use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use semver::Version;
use core::{Dependency, Manifest, PackageId, SourceId, Target, TargetKind};
use core::{Summary, Metadata, SourceMap};
use ops;
use util::{CargoResult, Config, LazyCell, ChainError, internal, human, lev_distance};
use rustc_serialize::{Encoder,Encodable};
/// Information about a package that is available somewhere in the file system.
///
/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
// The package's manifest
manifest: Manifest,
// The root of the package
manifest_path: PathBuf,
}
#[derive(RustcEncodable)]
struct SerializedPackage<'a> {
name: &'a str,
version: &'a str,
id: &'a PackageId,
source: &'a SourceId,
dependencies: &'a [Dependency],
targets: &'a [Target],
features: &'a HashMap<String, Vec<String>>,
manifest_path: &'a str,
}
impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let summary = self.manifest.summary();
let package_id = summary.package_id();
SerializedPackage {
name: &package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: &self.manifest.targets(),
features: summary.features(),
manifest_path: &self.manifest_path.display().to_string(),
}.encode(s)
}
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.to_path_buf(),
}
}
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
let source_id = try!(SourceId::for_path(path));
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
}
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn name(&self) -> &str { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn targets(&self) -> &[Target] { self.manifest().targets() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn publish(&self) -> bool { self.manifest.publish() }
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn generate_metadata(&self) -> Metadata {
self.package_id().generate_metadata()
}
pub fn find_closest_target(&self, target: &str, kind: TargetKind) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| *t.kind() == kind)
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.summary().package_id())
}
}
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool {
self.package_id() == other.package_id()
}
}
impl Eq for Package {}
impl hash::Hash for Package {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.package_id().hash(into)
}
}
pub struct PackageSet<'cfg> {
packages: Vec<(PackageId, LazyCell<Package>)>,
sources: RefCell<SourceMap<'cfg>>,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new(None))
}).collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
Box::new(self.packages.iter().map(|&(ref p, _)| p))
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
internal(format!("couldn't find `{}` in package set", id))
}));
let slot = &slot.1;
if let Some(pkg) = slot.borrow()
|
let mut sources = self.sources.borrow_mut();
let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
internal(format!("couldn't find source for `{}`", id))
}));
let pkg = try!(source.download(id).chain_error(|| {
human("unable to get packages from source")
}));
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}
|
{
return Ok(pkg)
}
|
conditional_block
|
package.rs
|
use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use semver::Version;
use core::{Dependency, Manifest, PackageId, SourceId, Target, TargetKind};
use core::{Summary, Metadata, SourceMap};
use ops;
use util::{CargoResult, Config, LazyCell, ChainError, internal, human, lev_distance};
use rustc_serialize::{Encoder,Encodable};
/// Information about a package that is available somewhere in the file system.
///
/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
// The package's manifest
manifest: Manifest,
// The root of the package
manifest_path: PathBuf,
}
#[derive(RustcEncodable)]
struct SerializedPackage<'a> {
name: &'a str,
version: &'a str,
id: &'a PackageId,
source: &'a SourceId,
dependencies: &'a [Dependency],
targets: &'a [Target],
features: &'a HashMap<String, Vec<String>>,
manifest_path: &'a str,
}
impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let summary = self.manifest.summary();
let package_id = summary.package_id();
SerializedPackage {
name: &package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: &self.manifest.targets(),
features: summary.features(),
manifest_path: &self.manifest_path.display().to_string(),
}.encode(s)
}
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.to_path_buf(),
}
}
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
let source_id = try!(SourceId::for_path(path));
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
}
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn name(&self) -> &str { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn targets(&self) -> &[Target] { self.manifest().targets() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn publish(&self) -> bool { self.manifest.publish() }
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn generate_metadata(&self) -> Metadata {
self.package_id().generate_metadata()
}
pub fn find_closest_target(&self, target: &str, kind: TargetKind) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| *t.kind() == kind)
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.summary().package_id())
}
}
|
}
impl Eq for Package {}
impl hash::Hash for Package {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.package_id().hash(into)
}
}
pub struct PackageSet<'cfg> {
packages: Vec<(PackageId, LazyCell<Package>)>,
sources: RefCell<SourceMap<'cfg>>,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new(None))
}).collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
Box::new(self.packages.iter().map(|&(ref p, _)| p))
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
internal(format!("couldn't find `{}` in package set", id))
}));
let slot = &slot.1;
if let Some(pkg) = slot.borrow() {
return Ok(pkg)
}
let mut sources = self.sources.borrow_mut();
let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
internal(format!("couldn't find source for `{}`", id))
}));
let pkg = try!(source.download(id).chain_error(|| {
human("unable to get packages from source")
}));
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}
|
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool {
self.package_id() == other.package_id()
}
|
random_line_split
|
package.rs
|
use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use semver::Version;
use core::{Dependency, Manifest, PackageId, SourceId, Target, TargetKind};
use core::{Summary, Metadata, SourceMap};
use ops;
use util::{CargoResult, Config, LazyCell, ChainError, internal, human, lev_distance};
use rustc_serialize::{Encoder,Encodable};
/// Information about a package that is available somewhere in the file system.
///
/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
// The package's manifest
manifest: Manifest,
// The root of the package
manifest_path: PathBuf,
}
#[derive(RustcEncodable)]
struct SerializedPackage<'a> {
name: &'a str,
version: &'a str,
id: &'a PackageId,
source: &'a SourceId,
dependencies: &'a [Dependency],
targets: &'a [Target],
features: &'a HashMap<String, Vec<String>>,
manifest_path: &'a str,
}
impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let summary = self.manifest.summary();
let package_id = summary.package_id();
SerializedPackage {
name: &package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: &self.manifest.targets(),
features: summary.features(),
manifest_path: &self.manifest_path.display().to_string(),
}.encode(s)
}
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.to_path_buf(),
}
}
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
let source_id = try!(SourceId::for_path(path));
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
}
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn name(&self) -> &str { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn targets(&self) -> &[Target] { self.manifest().targets() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn publish(&self) -> bool { self.manifest.publish() }
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn generate_metadata(&self) -> Metadata {
self.package_id().generate_metadata()
}
pub fn find_closest_target(&self, target: &str, kind: TargetKind) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| *t.kind() == kind)
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.summary().package_id())
}
}
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool
|
}
impl Eq for Package {}
impl hash::Hash for Package {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.package_id().hash(into)
}
}
pub struct PackageSet<'cfg> {
packages: Vec<(PackageId, LazyCell<Package>)>,
sources: RefCell<SourceMap<'cfg>>,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new(None))
}).collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
Box::new(self.packages.iter().map(|&(ref p, _)| p))
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
internal(format!("couldn't find `{}` in package set", id))
}));
let slot = &slot.1;
if let Some(pkg) = slot.borrow() {
return Ok(pkg)
}
let mut sources = self.sources.borrow_mut();
let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
internal(format!("couldn't find source for `{}`", id))
}));
let pkg = try!(source.download(id).chain_error(|| {
human("unable to get packages from source")
}));
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}
|
{
self.package_id() == other.package_id()
}
|
identifier_body
|
package.rs
|
use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use semver::Version;
use core::{Dependency, Manifest, PackageId, SourceId, Target, TargetKind};
use core::{Summary, Metadata, SourceMap};
use ops;
use util::{CargoResult, Config, LazyCell, ChainError, internal, human, lev_distance};
use rustc_serialize::{Encoder,Encodable};
/// Information about a package that is available somewhere in the file system.
///
/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
// The package's manifest
manifest: Manifest,
// The root of the package
manifest_path: PathBuf,
}
#[derive(RustcEncodable)]
struct SerializedPackage<'a> {
name: &'a str,
version: &'a str,
id: &'a PackageId,
source: &'a SourceId,
dependencies: &'a [Dependency],
targets: &'a [Target],
features: &'a HashMap<String, Vec<String>>,
manifest_path: &'a str,
}
impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let summary = self.manifest.summary();
let package_id = summary.package_id();
SerializedPackage {
name: &package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: &self.manifest.targets(),
features: summary.features(),
manifest_path: &self.manifest_path.display().to_string(),
}.encode(s)
}
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.to_path_buf(),
}
}
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
let source_id = try!(SourceId::for_path(path));
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
}
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn name(&self) -> &str { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn targets(&self) -> &[Target] { self.manifest().targets() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn publish(&self) -> bool { self.manifest.publish() }
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn generate_metadata(&self) -> Metadata {
self.package_id().generate_metadata()
}
pub fn find_closest_target(&self, target: &str, kind: TargetKind) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| *t.kind() == kind)
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.summary().package_id())
}
}
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool {
self.package_id() == other.package_id()
}
}
impl Eq for Package {}
impl hash::Hash for Package {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.package_id().hash(into)
}
}
pub struct PackageSet<'cfg> {
packages: Vec<(PackageId, LazyCell<Package>)>,
sources: RefCell<SourceMap<'cfg>>,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new(None))
}).collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
Box::new(self.packages.iter().map(|&(ref p, _)| p))
}
pub fn
|
(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
internal(format!("couldn't find `{}` in package set", id))
}));
let slot = &slot.1;
if let Some(pkg) = slot.borrow() {
return Ok(pkg)
}
let mut sources = self.sources.borrow_mut();
let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
internal(format!("couldn't find source for `{}`", id))
}));
let pkg = try!(source.download(id).chain_error(|| {
human("unable to get packages from source")
}));
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}
|
get
|
identifier_name
|
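// A small sketch of the "get or download" pattern PackageSet::get implements,
// rewritten against std::cell::OnceCell instead of Cargo's internal LazyCell.
// `fetch` is a hypothetical stub standing in for Source::download, and error
// handling is reduced to plain Strings.
use std::cell::OnceCell;
use std::collections::HashMap;

struct PackageSet {
    // package id -> lazily filled package contents
    packages: HashMap<String, OnceCell<String>>,
}

impl PackageSet {
    fn new(ids: &[&str]) -> PackageSet {
        PackageSet {
            packages: ids.iter().map(|id| (id.to_string(), OnceCell::new())).collect(),
        }
    }

    fn get(&self, id: &str) -> Result<&String, String> {
        let slot = self
            .packages
            .get(id)
            .ok_or_else(|| format!("couldn't find `{}` in package set", id))?;
        if let Some(pkg) = slot.get() {
            return Ok(pkg); // already fetched
        }
        let pkg = fetch(id)?;  // the expensive step runs at most once per id
        let _ = slot.set(pkg); // the slot was empty, so this cannot fail here
        Ok(slot.get().unwrap())
    }
}

fn fetch(id: &str) -> Result<String, String> {
    Ok(format!("contents of `{}`", id))
}

fn main() {
    let set = PackageSet::new(&["foo", "bar"]);
    println!("{}", set.get("foo").unwrap());
    println!("{}", set.get("foo").unwrap()); // second call is served from the cache
}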
webgl_paint_thread.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasCommonMsg, CanvasData, CanvasMsg, CanvasPixelData};
use canvas_traits::{FromLayoutMsg, byte_swap};
use euclid::size::Size2D;
use gleam::gl;
use ipc_channel::ipc::{self, IpcSender, IpcSharedMemory};
use offscreen_gl_context::{ColorAttachmentType, GLContext, GLLimits, GLContextAttributes, NativeGLContext};
use std::borrow::ToOwned;
use std::sync::mpsc::channel;
use util::thread::spawn_named;
use webrender_traits;
enum WebGLPaintTaskData {
WebRender(webrender_traits::RenderApi, webrender_traits::WebGLContextId),
Readback(GLContext<NativeGLContext>, (Option<(webrender_traits::RenderApi, webrender_traits::ImageKey)>)),
}
pub struct WebGLPaintThread {
size: Size2D<i32>,
data: WebGLPaintTaskData,
}
fn create_readback_painter(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api: Option<webrender_traits::RenderApi>)
-> Result<(WebGLPaintThread, GLLimits), String> {
let context = try!(GLContext::<NativeGLContext>::new(size, attrs, ColorAttachmentType::Texture, None));
let limits = context.borrow_limits().clone();
let webrender_api_and_image_key = webrender_api.map(|wr| {
let key = wr.alloc_image();
(wr, key)
});
let painter = WebGLPaintThread {
size: size,
data: WebGLPaintTaskData::Readback(context, webrender_api_and_image_key)
};
Ok((painter, limits))
}
impl WebGLPaintThread {
fn new(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(WebGLPaintThread, GLLimits), String> {
if let Some(sender) = webrender_api_sender {
let wr_api = sender.create_api();
match wr_api.request_webgl_context(&size, attrs) {
Ok((id, limits)) => {
let painter = WebGLPaintThread {
data: WebGLPaintTaskData::WebRender(wr_api, id),
size: size
};
Ok((painter, limits))
},
Err(msg) => {
warn!("Initial context creation failed, falling back to readback: {}", msg);
create_readback_painter(size, attrs, Some(wr_api))
}
}
} else {
create_readback_painter(size, attrs, None)
}
}
fn handle_webgl_message(&self, message: webrender_traits::WebGLCommand)
|
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(IpcSender<CanvasMsg>, GLLimits), String> {
let (sender, receiver) = ipc::channel::<CanvasMsg>().unwrap();
let (result_chan, result_port) = channel();
spawn_named("WebGLThread".to_owned(), move || {
let mut painter = match WebGLPaintThread::new(size, attrs, webrender_api_sender) {
Ok((thread, limits)) => {
result_chan.send(Ok(limits)).unwrap();
thread
},
Err(e) => {
result_chan.send(Err(e)).unwrap();
return
}
};
painter.init();
loop {
match receiver.recv().unwrap() {
CanvasMsg::WebGL(message) => painter.handle_webgl_message(message),
CanvasMsg::Common(message) => {
match message {
CanvasCommonMsg::Close => break,
// TODO(emilio): handle error nicely
CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),
}
},
CanvasMsg::FromLayout(message) => {
match message {
FromLayoutMsg::SendData(chan) =>
painter.send_data(chan),
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
}
}
});
result_port.recv().unwrap().map(|limits| (sender, limits))
}
fn send_data(&mut self, chan: IpcSender<CanvasData>) {
match self.data {
WebGLPaintTaskData::Readback(_, ref webrender_api_and_image_key) => {
let width = self.size.width as usize;
let height = self.size.height as usize;
let mut pixels = gl::read_pixels(0, 0,
self.size.width as gl::GLsizei,
self.size.height as gl::GLsizei,
gl::RGBA, gl::UNSIGNED_BYTE);
// flip image vertically (texture is upside down)
let orig_pixels = pixels.clone();
let stride = width * 4;
for y in 0..height {
let dst_start = y * stride;
let src_start = (height - y - 1) * stride;
let src_slice = &orig_pixels[src_start.. src_start + stride];
(&mut pixels[dst_start.. dst_start + stride]).clone_from_slice(&src_slice[..stride]);
}
// rgba -> bgra
byte_swap(&mut pixels);
if let Some((ref wr, wr_image_key)) = *webrender_api_and_image_key {
// TODO: This shouldn't be a common path, but try to avoid
// the spurious clone().
wr.update_image(wr_image_key,
width as u32,
height as u32,
webrender_traits::ImageFormat::RGBA8,
pixels.clone());
}
let pixel_data = CanvasPixelData {
image_data: IpcSharedMemory::from_bytes(&pixels[..]),
image_key: webrender_api_and_image_key.as_ref().map(|&(_, key)| key),
};
chan.send(CanvasData::Pixels(pixel_data)).unwrap();
}
WebGLPaintTaskData::WebRender(_, id) => {
chan.send(CanvasData::WebGL(id)).unwrap();
}
}
}
#[allow(unsafe_code)]
fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {
match self.data {
WebGLPaintTaskData::Readback(ref mut context, _) => {
if size.width > self.size.width ||
size.height > self.size.height {
try!(context.resize(size));
self.size = context.borrow_draw_buffer().unwrap().size();
} else {
self.size = size;
unsafe { gl::Scissor(0, 0, size.width, size.height); }
}
}
WebGLPaintTaskData::WebRender(_, _) => {
// TODO
}
}
Ok(())
}
fn init(&mut self) {
if let WebGLPaintTaskData::Readback(ref context, _) = self.data {
context.make_current().unwrap();
}
}
}
impl Drop for WebGLPaintThread {
fn drop(&mut self) {
if let WebGLPaintTaskData::Readback(_, Some((ref mut wr, image_key))) = self.data {
wr.delete_image(image_key);
}
}
}
|
{
debug!("WebGL message: {:?}", message);
match self.data {
WebGLPaintTaskData::WebRender(ref api, id) => {
api.send_webgl_command(id, message);
}
WebGLPaintTaskData::Readback(ref ctx, _) => {
message.apply(ctx);
}
}
}
|
identifier_body
|
webgl_paint_thread.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasCommonMsg, CanvasData, CanvasMsg, CanvasPixelData};
use canvas_traits::{FromLayoutMsg, byte_swap};
use euclid::size::Size2D;
use gleam::gl;
use ipc_channel::ipc::{self, IpcSender, IpcSharedMemory};
use offscreen_gl_context::{ColorAttachmentType, GLContext, GLLimits, GLContextAttributes, NativeGLContext};
use std::borrow::ToOwned;
use std::sync::mpsc::channel;
use util::thread::spawn_named;
use webrender_traits;
enum WebGLPaintTaskData {
WebRender(webrender_traits::RenderApi, webrender_traits::WebGLContextId),
Readback(GLContext<NativeGLContext>, (Option<(webrender_traits::RenderApi, webrender_traits::ImageKey)>)),
}
pub struct WebGLPaintThread {
size: Size2D<i32>,
data: WebGLPaintTaskData,
}
fn create_readback_painter(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api: Option<webrender_traits::RenderApi>)
-> Result<(WebGLPaintThread, GLLimits), String> {
let context = try!(GLContext::<NativeGLContext>::new(size, attrs, ColorAttachmentType::Texture, None));
let limits = context.borrow_limits().clone();
let webrender_api_and_image_key = webrender_api.map(|wr| {
let key = wr.alloc_image();
(wr, key)
});
let painter = WebGLPaintThread {
size: size,
data: WebGLPaintTaskData::Readback(context, webrender_api_and_image_key)
};
Ok((painter, limits))
}
impl WebGLPaintThread {
fn new(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(WebGLPaintThread, GLLimits), String> {
if let Some(sender) = webrender_api_sender {
let wr_api = sender.create_api();
match wr_api.request_webgl_context(&size, attrs) {
Ok((id, limits)) => {
let painter = WebGLPaintThread {
data: WebGLPaintTaskData::WebRender(wr_api, id),
size: size
};
Ok((painter, limits))
},
Err(msg) => {
warn!("Initial context creation failed, falling back to readback: {}", msg);
create_readback_painter(size, attrs, Some(wr_api))
}
}
} else {
create_readback_painter(size, attrs, None)
}
}
fn handle_webgl_message(&self, message: webrender_traits::WebGLCommand) {
debug!("WebGL message: {:?}", message);
match self.data {
WebGLPaintTaskData::WebRender(ref api, id) => {
api.send_webgl_command(id, message);
}
WebGLPaintTaskData::Readback(ref ctx, _) => {
message.apply(ctx);
}
}
}
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(IpcSender<CanvasMsg>, GLLimits), String> {
let (sender, receiver) = ipc::channel::<CanvasMsg>().unwrap();
|
thread
},
Err(e) => {
result_chan.send(Err(e)).unwrap();
return
}
};
painter.init();
loop {
match receiver.recv().unwrap() {
CanvasMsg::WebGL(message) => painter.handle_webgl_message(message),
CanvasMsg::Common(message) => {
match message {
CanvasCommonMsg::Close => break,
// TODO(emilio): handle error nicely
CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),
}
},
CanvasMsg::FromLayout(message) => {
match message {
FromLayoutMsg::SendData(chan) =>
painter.send_data(chan),
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
}
}
});
result_port.recv().unwrap().map(|limits| (sender, limits))
}
fn send_data(&mut self, chan: IpcSender<CanvasData>) {
match self.data {
WebGLPaintTaskData::Readback(_, ref webrender_api_and_image_key) => {
let width = self.size.width as usize;
let height = self.size.height as usize;
let mut pixels = gl::read_pixels(0, 0,
self.size.width as gl::GLsizei,
self.size.height as gl::GLsizei,
gl::RGBA, gl::UNSIGNED_BYTE);
// flip image vertically (texture is upside down)
let orig_pixels = pixels.clone();
let stride = width * 4;
for y in 0..height {
let dst_start = y * stride;
let src_start = (height - y - 1) * stride;
let src_slice = &orig_pixels[src_start.. src_start + stride];
(&mut pixels[dst_start.. dst_start + stride]).clone_from_slice(&src_slice[..stride]);
}
// rgba -> bgra
byte_swap(&mut pixels);
if let Some((ref wr, wr_image_key)) = *webrender_api_and_image_key {
// TODO: This shouldn't be a common path, but try to avoid
// the spurious clone().
wr.update_image(wr_image_key,
width as u32,
height as u32,
webrender_traits::ImageFormat::RGBA8,
pixels.clone());
}
let pixel_data = CanvasPixelData {
image_data: IpcSharedMemory::from_bytes(&pixels[..]),
image_key: webrender_api_and_image_key.as_ref().map(|&(_, key)| key),
};
chan.send(CanvasData::Pixels(pixel_data)).unwrap();
}
WebGLPaintTaskData::WebRender(_, id) => {
chan.send(CanvasData::WebGL(id)).unwrap();
}
}
}
#[allow(unsafe_code)]
fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {
match self.data {
WebGLPaintTaskData::Readback(ref mut context, _) => {
if size.width > self.size.width ||
size.height > self.size.height {
try!(context.resize(size));
self.size = context.borrow_draw_buffer().unwrap().size();
} else {
self.size = size;
unsafe { gl::Scissor(0, 0, size.width, size.height); }
}
}
WebGLPaintTaskData::WebRender(_, _) => {
// TODO
}
}
Ok(())
}
fn init(&mut self) {
if let WebGLPaintTaskData::Readback(ref context, _) = self.data {
context.make_current().unwrap();
}
}
}
impl Drop for WebGLPaintThread {
fn drop(&mut self) {
if let WebGLPaintTaskData::Readback(_, Some((ref mut wr, image_key))) = self.data {
wr.delete_image(image_key);
}
}
}
|
let (result_chan, result_port) = channel();
spawn_named("WebGLThread".to_owned(), move || {
let mut painter = match WebGLPaintThread::new(size, attrs, webrender_api_sender) {
Ok((thread, limits)) => {
result_chan.send(Ok(limits)).unwrap();
|
random_line_split
|
webgl_paint_thread.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasCommonMsg, CanvasData, CanvasMsg, CanvasPixelData};
use canvas_traits::{FromLayoutMsg, byte_swap};
use euclid::size::Size2D;
use gleam::gl;
use ipc_channel::ipc::{self, IpcSender, IpcSharedMemory};
use offscreen_gl_context::{ColorAttachmentType, GLContext, GLLimits, GLContextAttributes, NativeGLContext};
use std::borrow::ToOwned;
use std::sync::mpsc::channel;
use util::thread::spawn_named;
use webrender_traits;
enum WebGLPaintTaskData {
WebRender(webrender_traits::RenderApi, webrender_traits::WebGLContextId),
Readback(GLContext<NativeGLContext>, (Option<(webrender_traits::RenderApi, webrender_traits::ImageKey)>)),
}
pub struct WebGLPaintThread {
size: Size2D<i32>,
data: WebGLPaintTaskData,
}
fn create_readback_painter(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api: Option<webrender_traits::RenderApi>)
-> Result<(WebGLPaintThread, GLLimits), String> {
let context = try!(GLContext::<NativeGLContext>::new(size, attrs, ColorAttachmentType::Texture, None));
let limits = context.borrow_limits().clone();
let webrender_api_and_image_key = webrender_api.map(|wr| {
let key = wr.alloc_image();
(wr, key)
});
let painter = WebGLPaintThread {
size: size,
data: WebGLPaintTaskData::Readback(context, webrender_api_and_image_key)
};
Ok((painter, limits))
}
impl WebGLPaintThread {
fn new(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(WebGLPaintThread, GLLimits), String> {
if let Some(sender) = webrender_api_sender {
let wr_api = sender.create_api();
match wr_api.request_webgl_context(&size, attrs) {
Ok((id, limits)) => {
let painter = WebGLPaintThread {
data: WebGLPaintTaskData::WebRender(wr_api, id),
size: size
};
Ok((painter, limits))
},
Err(msg) => {
warn!("Initial context creation failed, falling back to readback: {}", msg);
create_readback_painter(size, attrs, Some(wr_api))
}
}
} else {
create_readback_painter(size, attrs, None)
}
}
fn handle_webgl_message(&self, message: webrender_traits::WebGLCommand) {
debug!("WebGL message: {:?}", message);
match self.data {
WebGLPaintTaskData::WebRender(ref api, id) => {
api.send_webgl_command(id, message);
}
WebGLPaintTaskData::Readback(ref ctx, _) => {
message.apply(ctx);
}
}
}
/// Creates a new `WebGLPaintThread` and returns an `IpcSender` to
/// communicate with it.
pub fn start(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> Result<(IpcSender<CanvasMsg>, GLLimits), String> {
let (sender, receiver) = ipc::channel::<CanvasMsg>().unwrap();
let (result_chan, result_port) = channel();
spawn_named("WebGLThread".to_owned(), move || {
let mut painter = match WebGLPaintThread::new(size, attrs, webrender_api_sender) {
Ok((thread, limits)) => {
result_chan.send(Ok(limits)).unwrap();
thread
},
Err(e) => {
result_chan.send(Err(e)).unwrap();
return
}
};
painter.init();
loop {
match receiver.recv().unwrap() {
CanvasMsg::WebGL(message) => painter.handle_webgl_message(message),
CanvasMsg::Common(message) => {
match message {
CanvasCommonMsg::Close => break,
// TODO(emilio): handle error nicely
CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),
}
},
CanvasMsg::FromLayout(message) => {
match message {
FromLayoutMsg::SendData(chan) =>
painter.send_data(chan),
}
}
CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
}
}
});
result_port.recv().unwrap().map(|limits| (sender, limits))
}
fn
|
(&mut self, chan: IpcSender<CanvasData>) {
match self.data {
WebGLPaintTaskData::Readback(_, ref webrender_api_and_image_key) => {
let width = self.size.width as usize;
let height = self.size.height as usize;
let mut pixels = gl::read_pixels(0, 0,
self.size.width as gl::GLsizei,
self.size.height as gl::GLsizei,
gl::RGBA, gl::UNSIGNED_BYTE);
// flip image vertically (texture is upside down)
let orig_pixels = pixels.clone();
let stride = width * 4;
for y in 0..height {
let dst_start = y * stride;
let src_start = (height - y - 1) * stride;
let src_slice = &orig_pixels[src_start.. src_start + stride];
(&mut pixels[dst_start.. dst_start + stride]).clone_from_slice(&src_slice[..stride]);
}
// rgba -> bgra
byte_swap(&mut pixels);
if let Some((ref wr, wr_image_key)) = *webrender_api_and_image_key {
// TODO: This shouldn't be a common path, but try to avoid
// the spurious clone().
wr.update_image(wr_image_key,
width as u32,
height as u32,
webrender_traits::ImageFormat::RGBA8,
pixels.clone());
}
let pixel_data = CanvasPixelData {
image_data: IpcSharedMemory::from_bytes(&pixels[..]),
image_key: webrender_api_and_image_key.as_ref().map(|&(_, key)| key),
};
chan.send(CanvasData::Pixels(pixel_data)).unwrap();
}
WebGLPaintTaskData::WebRender(_, id) => {
chan.send(CanvasData::WebGL(id)).unwrap();
}
}
}
#[allow(unsafe_code)]
fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {
match self.data {
WebGLPaintTaskData::Readback(ref mut context, _) => {
if size.width > self.size.width ||
size.height > self.size.height {
try!(context.resize(size));
self.size = context.borrow_draw_buffer().unwrap().size();
} else {
self.size = size;
unsafe { gl::Scissor(0, 0, size.width, size.height); }
}
}
WebGLPaintTaskData::WebRender(_, _) => {
// TODO
}
}
Ok(())
}
fn init(&mut self) {
if let WebGLPaintTaskData::Readback(ref context, _) = self.data {
context.make_current().unwrap();
}
}
}
impl Drop for WebGLPaintThread {
fn drop(&mut self) {
if let WebGLPaintTaskData::Readback(_, Some((ref mut wr, image_key))) = self.data {
wr.delete_image(image_key);
}
}
}
|
send_data
|
identifier_name
|
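// A standalone sketch of the two pixel fix-ups send_data performs after
// gl::read_pixels: flipping the rows (GL readback is bottom-up) and swapping
// RGBA to BGRA in place. The 2x2 image below is made-up data, not GL output.
fn flip_vertically(pixels: &mut [u8], width: usize, height: usize) {
    let stride = width * 4;
    let orig = pixels.to_vec(); // same extra copy the readback path makes
    for y in 0..height {
        let dst = y * stride;
        let src = (height - y - 1) * stride;
        pixels[dst..dst + stride].copy_from_slice(&orig[src..src + stride]);
    }
}

fn rgba_to_bgra(pixels: &mut [u8]) {
    for px in pixels.chunks_exact_mut(4) {
        px.swap(0, 2); // swap the red and blue channels
    }
}

fn main() {
    // Each pixel is [R, G, B, A]; two rows of two pixels.
    let mut pixels = vec![
        1, 2, 3, 255, 4, 5, 6, 255,     // top row
        7, 8, 9, 255, 10, 11, 12, 255,  // bottom row
    ];
    flip_vertically(&mut pixels, 2, 2);
    rgba_to_bgra(&mut pixels);
    println!("{:?}", pixels);
}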
kahansum.rs
|
// Implements http://rosettacode.org/wiki/Kahan_summation
#![feature(std_misc)]
#![feature(collections)]
use std::num::Float;
use std::f32;
fn
|
(lst: &[f32]) -> Option<f32> {
if lst.is_empty() { return None }
let max = lst.iter().fold(f32::NEG_INFINITY,
|a, &b| Float::max(a, b));
Some(max)
}
fn with_bits(val: f32, digits: usize) -> f32 {
let num = std::f32::to_str_digits(val, digits);
num.parse::<f32>().unwrap()
}
fn kahan_sum(lst: &[f32]) -> Option<f32> {
let mut sum = 0.0f32;
let mut c = 0.0f32;
for i in lst {
let y = *i - c;
let t = sum + y;
c = (t - sum) - y;
sum = t;
}
Some(with_bits(sum, 1))
}
fn all_sums(vec: &[f32]) -> Vec<f32> {
let mut res = Vec::new();
let mut perms = vec.permutations();
loop {
let v = perms.next();
match v {
Some(_v) => {
let mut sum = 0.0f32;
for e in &_v {
sum += with_bits(*e, 1);
}
res.push(with_bits(sum, 1));
}
None => break
}
}
res
}
#[cfg(not(test))]
fn main() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
println!("max: {} res: {}", max, res);
}
#[test]
fn test_kahansum() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
assert!(max < res);
}
#[test]
fn test_withbits() {
let v = 3.123345f32;
let res = with_bits(v, 3);
assert!(res == 3.123f32);
}
|
find_max
|
identifier_name
|
kahansum.rs
|
// Implements http://rosettacode.org/wiki/Kahan_summation
#![feature(std_misc)]
#![feature(collections)]
use std::num::Float;
use std::f32;
fn find_max(lst: &[f32]) -> Option<f32> {
if lst.is_empty() { return None }
let max = lst.iter().fold(f32::NEG_INFINITY,
|a, &b| Float::max(a, b));
Some(max)
}
fn with_bits(val: f32, digits: usize) -> f32 {
let num = std::f32::to_str_digits(val, digits);
num.parse::<f32>().unwrap()
}
fn kahan_sum(lst: &[f32]) -> Option<f32> {
let mut sum = 0.0f32;
let mut c = 0.0f32;
for i in lst {
let y = *i - c;
let t = sum + y;
c = (t - sum) - y;
sum = t;
}
Some(with_bits(sum, 1))
}
|
let mut res = Vec::new();
let mut perms = vec.permutations();
loop {
let v = perms.next();
match v {
Some(_v) => {
let mut sum = 0.0f32;
for e in &_v {
sum += with_bits(*e, 1);
}
res.push(with_bits(sum, 1));
}
None => break
}
}
res
}
#[cfg(not(test))]
fn main() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
println!("max: {} res: {}", max, res);
}
#[test]
fn test_kahansum() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
assert!(max < res);
}
#[test]
fn test_withbits() {
let v = 3.123345f32;
let res = with_bits(v, 3);
assert!(res == 3.123f32);
}
|
fn all_sums(vec: &[f32]) -> Vec<f32> {
|
random_line_split
|
kahansum.rs
|
// Implements http://rosettacode.org/wiki/Kahan_summation
#![feature(std_misc)]
#![feature(collections)]
use std::num::Float;
use std::f32;
fn find_max(lst: &[f32]) -> Option<f32> {
if lst.is_empty()
|
let max = lst.iter().fold(f32::NEG_INFINITY,
|a, &b| Float::max(a, b));
Some(max)
}
fn with_bits(val: f32, digits: usize) -> f32 {
let num = std::f32::to_str_digits(val, digits);
num.parse::<f32>().unwrap()
}
fn kahan_sum(lst: &[f32]) -> Option<f32> {
let mut sum = 0.0f32;
let mut c = 0.0f32;
for i in lst {
let y = *i - c;
let t = sum + y;
c = (t - sum) - y;
sum = t;
}
Some(with_bits(sum, 1))
}
fn all_sums(vec: &[f32]) -> Vec<f32> {
let mut res = Vec::new();
let mut perms = vec.permutations();
loop {
let v = perms.next();
match v {
Some(_v) => {
let mut sum = 0.0f32;
for e in &_v {
sum += with_bits(*e, 1);
}
res.push(with_bits(sum, 1));
}
None => break
}
}
res
}
#[cfg(not(test))]
fn main() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
println!("max: {} res: {}", max, res);
}
#[test]
fn test_kahansum() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
assert!(max < res);
}
#[test]
fn test_withbits() {
let v = 3.123345f32;
let res = with_bits(v, 3);
assert!(res == 3.123f32);
}
|
{ return None }
|
conditional_block
|
kahansum.rs
|
// Implements http://rosettacode.org/wiki/Kahan_summation
#![feature(std_misc)]
#![feature(collections)]
use std::num::Float;
use std::f32;
fn find_max(lst: &[f32]) -> Option<f32> {
if lst.is_empty() { return None }
let max = lst.iter().fold(f32::NEG_INFINITY,
|a, &b| Float::max(a, b));
Some(max)
}
fn with_bits(val: f32, digits: usize) -> f32 {
let num = std::f32::to_str_digits(val, digits);
num.parse::<f32>().unwrap()
}
fn kahan_sum(lst: &[f32]) -> Option<f32> {
let mut sum = 0.0f32;
let mut c = 0.0f32;
for i in lst {
let y = *i - c;
let t = sum + y;
c = (t - sum) - y;
sum = t;
}
Some(with_bits(sum, 1))
}
fn all_sums(vec: &[f32]) -> Vec<f32> {
let mut res = Vec::new();
let mut perms = vec.permutations();
loop {
let v = perms.next();
match v {
Some(_v) => {
let mut sum = 0.0f32;
for e in &_v {
sum += with_bits(*e, 1);
}
res.push(with_bits(sum, 1));
}
None => break
}
}
res
}
#[cfg(not(test))]
fn main() {
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
println!("max: {} res: {}", max, res);
}
#[test]
fn test_kahansum()
|
#[test]
fn test_withbits() {
let v = 3.123345f32;
let res = with_bits(v, 3);
assert!(res == 3.123f32);
}
|
{
let v = [10000.0f32, 3.14159, 2.71828];
let sums = all_sums(&v);
let res = kahan_sum(&v).unwrap();
let max = find_max(&sums[..]).unwrap();
assert!(max < res);
}
|
identifier_body
|
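// The kahansum.rs rows above target pre-1.0 Rust: `#![feature(std_misc)]`,
// `std::num::Float`, `std::f32::to_str_digits` and `Vec::permutations` no
// longer exist. A sketch of the same compensated summation on current stable
// Rust, with the permutation and rounding scaffolding left out:
fn kahan_sum(values: &[f32]) -> f32 {
    let mut sum = 0.0f32;
    let mut c = 0.0f32; // running compensation for lost low-order bits
    for &x in values {
        let y = x - c;
        let t = sum + y;
        c = (t - sum) - y; // what rounding discarded in `sum + y`
        sum = t;
    }
    sum
}

fn main() {
    let v = [10000.0f32, 3.14159, 2.71828];
    println!("naive: {}", v.iter().sum::<f32>());
    println!("kahan: {}", kahan_sum(&v));
}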
conversions.rs
|
// Copyright (c) 2016-2018 Bruce Stenning. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
// AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
#![allow(dead_code)]
use std::f32;
pub fn degrees_to_radians(degrees: f32) -> f32 {
degrees * f32::consts::PI / 180.0f32
}
|
pub fn radians_to_degrees(radians: f32) -> f32 {
radians * 180.0f32 / f32::consts::PI
}
|
random_line_split
|
|
conversions.rs
|
// Copyright (c) 2016-2018 Bruce Stenning. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
// AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
#![allow(dead_code)]
use std::f32;
pub fn degrees_to_radians(degrees: f32) -> f32 {
degrees * f32::consts::PI / 180.0f32
}
pub fn radians_to_degrees(radians: f32) -> f32
|
{
radians * 180.0f32 / f32::consts::PI
}
|
identifier_body
|
|
conversions.rs
|
// Copyright (c) 2016-2018 Bruce Stenning. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
// AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
#![allow(dead_code)]
use std::f32;
pub fn degrees_to_radians(degrees: f32) -> f32 {
degrees * f32::consts::PI / 180.0f32
}
pub fn
|
(radians: f32) -> f32 {
radians * 180.0f32 / f32::consts::PI
}
|
radians_to_degrees
|
identifier_name
|
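A small usage sketch for the two conversion helpers above, repeated here as free functions so it compiles standalone; the tolerance values are arbitrary assumptions.

use std::f32;

fn degrees_to_radians(degrees: f32) -> f32 {
    degrees * f32::consts::PI / 180.0f32
}

fn radians_to_degrees(radians: f32) -> f32 {
    radians * 180.0f32 / f32::consts::PI
}

fn main() {
    // 90 degrees should map to pi/2, and the round trip should come back
    // to the original angle up to floating-point error.
    let deg = 90.0f32;
    let rad = degrees_to_radians(deg);
    assert!((rad - f32::consts::FRAC_PI_2).abs() < 1e-6);
    assert!((radians_to_degrees(rad) - deg).abs() < 1e-4);
    println!("{} degrees = {} radians", deg, rad);
}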
windowing.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use compositor_thread::{CompositorProxy, CompositorReceiver};
use euclid::point::TypedPoint2D;
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use euclid::{Point2D, Size2D};
use layers::geometry::DevicePixel;
use layers::platform::surface::NativeDisplay;
use msg::constellation_msg::{Key, KeyModifiers, KeyState};
use net_traits::net_error_list::NetError;
use script_traits::{MouseButton, TouchpadPressurePhase, TouchEventType, TouchId};
use std::fmt::{Debug, Error, Formatter};
use style_traits::cursor::Cursor;
use url::Url;
use util::geometry::ScreenPx;
#[derive(Clone)]
pub enum MouseWindowEvent {
Click(MouseButton, TypedPoint2D<DevicePixel, f32>),
MouseDown(MouseButton, TypedPoint2D<DevicePixel, f32>),
MouseUp(MouseButton, TypedPoint2D<DevicePixel, f32>),
}
#[derive(Clone)]
pub enum
|
{
Forward,
Back,
}
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent to initialize the GL context. The windowing system must have a valid, current GL
/// context when this message is sent.
InitializeCompositing,
/// Sent when the window is resized.
Resize(TypedSize2D<DevicePixel, u32>),
/// Touchpad Pressure
TouchpadPressure(TypedPoint2D<DevicePixel, f32>, f32, TouchpadPressurePhase),
/// Sent when you want to override the viewport.
Viewport(TypedPoint2D<DevicePixel, u32>, TypedSize2D<DevicePixel, u32>),
/// Sent when a new URL is to be loaded.
LoadUrl(String),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
    /// Sent when the mouse moves.
MouseWindowMoveEventClass(TypedPoint2D<DevicePixel, f32>),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, TypedPoint2D<DevicePixel, f32>),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(TypedPoint2D<DevicePixel, f32>, TypedPoint2D<DevicePixel, i32>, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(WindowNavigateMsg),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
KeyEvent(Option<char>, Key, KeyState, KeyModifiers),
    /// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::InitializeCompositing => write!(f, "InitializeCompositing"),
WindowEvent::Resize(..) => write!(f, "Resize"),
WindowEvent::TouchpadPressure(..) => write!(f, "TouchpadPressure"),
WindowEvent::Viewport(..) => write!(f, "Viewport"),
WindowEvent::KeyEvent(..) => write!(f, "Key"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),
WindowEvent::Reload => write!(f, "Reload"),
}
}
}
pub trait WindowMethods {
/// Returns the size of the window in hardware pixels.
fn framebuffer_size(&self) -> TypedSize2D<DevicePixel, u32>;
/// Returns the size of the window in density-independent "px" units.
fn size(&self) -> TypedSize2D<ScreenPx, f32>;
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
    /// Returns the size of the window including head and borders, and the window's position.
fn client_window(&self) -> (Size2D<u32>, Point2D<i32>);
/// Set the size inside of borders and head
fn set_inner_size(&self, size: Size2D<u32>);
/// Set the window position
fn set_position(&self, point: Point2D<i32>);
/// Sets the page title for the current page.
fn set_page_title(&self, title: Option<String>);
/// Sets the load data for the current page.
fn set_page_url(&self, url: Url);
/// Called when the browser chrome should display a status message.
fn status(&self, Option<String>);
/// Called when the browser has started loading a frame.
fn load_start(&self, back: bool, forward: bool);
/// Called when the browser is done loading a frame.
fn load_end(&self, back: bool, forward: bool, root: bool);
/// Called when the browser encounters an error while loading a URL
fn load_error(&self, code: NetError, url: String);
/// Called when the <head> tag has finished parsing
fn head_parsed(&self);
/// Returns the scale factor of the system (device pixels / screen pixels).
fn scale_factor(&self) -> ScaleFactor<ScreenPx, DevicePixel, f32>;
/// Gets the OS native graphics display for this window.
fn native_display(&self) -> NativeDisplay;
/// Creates a channel to the compositor. The dummy parameter is needed because we don't have
/// UFCS in Rust yet.
///
/// This is part of the windowing system because its implementation often involves OS-specific
    /// magic to wake up the window's event loop.
fn create_compositor_channel(&self)
-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: usize, height: usize) -> bool;
/// Sets the cursor to be used in the window.
fn set_cursor(&self, cursor: Cursor);
/// Process a key event.
fn handle_key(&self, ch: Option<char>, key: Key, mods: KeyModifiers);
/// Does this window support a clipboard
fn supports_clipboard(&self) -> bool;
/// Add a favicon
fn set_favicon(&self, url: Url);
}
|
WindowNavigateMsg
|
identifier_name
|
windowing.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`.
use compositor_thread::{CompositorProxy, CompositorReceiver};
use euclid::point::TypedPoint2D;
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use euclid::{Point2D, Size2D};
use layers::geometry::DevicePixel;
use layers::platform::surface::NativeDisplay;
use msg::constellation_msg::{Key, KeyModifiers, KeyState};
use net_traits::net_error_list::NetError;
use script_traits::{MouseButton, TouchpadPressurePhase, TouchEventType, TouchId};
use std::fmt::{Debug, Error, Formatter};
use style_traits::cursor::Cursor;
use url::Url;
use util::geometry::ScreenPx;
#[derive(Clone)]
pub enum MouseWindowEvent {
Click(MouseButton, TypedPoint2D<DevicePixel, f32>),
MouseDown(MouseButton, TypedPoint2D<DevicePixel, f32>),
MouseUp(MouseButton, TypedPoint2D<DevicePixel, f32>),
}
#[derive(Clone)]
pub enum WindowNavigateMsg {
Forward,
Back,
}
|
/// by another Servo subsystem).
///
/// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo.
/// It's possible that this should be something like
/// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead.
Idle,
/// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this
/// message, the window must make the same GL context as in `PrepareRenderingEvent` current.
Refresh,
/// Sent to initialize the GL context. The windowing system must have a valid, current GL
/// context when this message is sent.
InitializeCompositing,
/// Sent when the window is resized.
Resize(TypedSize2D<DevicePixel, u32>),
/// Touchpad Pressure
TouchpadPressure(TypedPoint2D<DevicePixel, f32>, f32, TouchpadPressurePhase),
/// Sent when you want to override the viewport.
Viewport(TypedPoint2D<DevicePixel, u32>, TypedSize2D<DevicePixel, u32>),
/// Sent when a new URL is to be loaded.
LoadUrl(String),
/// Sent when a mouse hit test is to be performed.
MouseWindowEventClass(MouseWindowEvent),
    /// Sent when the mouse moves.
MouseWindowMoveEventClass(TypedPoint2D<DevicePixel, f32>),
/// Touch event: type, identifier, point
Touch(TouchEventType, TouchId, TypedPoint2D<DevicePixel, f32>),
/// Sent when the user scrolls. The first point is the delta and the second point is the
/// origin.
Scroll(TypedPoint2D<DevicePixel, f32>, TypedPoint2D<DevicePixel, i32>, TouchEventType),
/// Sent when the user zooms.
Zoom(f32),
/// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel).
PinchZoom(f32),
/// Sent when the user resets zoom to default.
ResetZoom,
/// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace).
Navigation(WindowNavigateMsg),
/// Sent when the user quits the application
Quit,
/// Sent when a key input state changes
KeyEvent(Option<char>, Key, KeyState, KeyModifiers),
    /// Sent when Ctrl+R/Apple+R is called to reload the current page.
Reload,
}
impl Debug for WindowEvent {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
WindowEvent::Idle => write!(f, "Idle"),
WindowEvent::Refresh => write!(f, "Refresh"),
WindowEvent::InitializeCompositing => write!(f, "InitializeCompositing"),
WindowEvent::Resize(..) => write!(f, "Resize"),
WindowEvent::TouchpadPressure(..) => write!(f, "TouchpadPressure"),
WindowEvent::Viewport(..) => write!(f, "Viewport"),
WindowEvent::KeyEvent(..) => write!(f, "Key"),
WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"),
WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"),
WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"),
WindowEvent::Touch(..) => write!(f, "Touch"),
WindowEvent::Scroll(..) => write!(f, "Scroll"),
WindowEvent::Zoom(..) => write!(f, "Zoom"),
WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"),
WindowEvent::ResetZoom => write!(f, "ResetZoom"),
WindowEvent::Navigation(..) => write!(f, "Navigation"),
WindowEvent::Quit => write!(f, "Quit"),
WindowEvent::Reload => write!(f, "Reload"),
}
}
}
pub trait WindowMethods {
/// Returns the size of the window in hardware pixels.
fn framebuffer_size(&self) -> TypedSize2D<DevicePixel, u32>;
/// Returns the size of the window in density-independent "px" units.
fn size(&self) -> TypedSize2D<ScreenPx, f32>;
/// Presents the window to the screen (perhaps by page flipping).
fn present(&self);
    /// Returns the size of the window including head and borders, and the window's position.
fn client_window(&self) -> (Size2D<u32>, Point2D<i32>);
/// Set the size inside of borders and head
fn set_inner_size(&self, size: Size2D<u32>);
/// Set the window position
fn set_position(&self, point: Point2D<i32>);
/// Sets the page title for the current page.
fn set_page_title(&self, title: Option<String>);
/// Sets the load data for the current page.
fn set_page_url(&self, url: Url);
/// Called when the browser chrome should display a status message.
fn status(&self, Option<String>);
/// Called when the browser has started loading a frame.
fn load_start(&self, back: bool, forward: bool);
/// Called when the browser is done loading a frame.
fn load_end(&self, back: bool, forward: bool, root: bool);
/// Called when the browser encounters an error while loading a URL
fn load_error(&self, code: NetError, url: String);
/// Called when the <head> tag has finished parsing
fn head_parsed(&self);
/// Returns the scale factor of the system (device pixels / screen pixels).
fn scale_factor(&self) -> ScaleFactor<ScreenPx, DevicePixel, f32>;
/// Gets the OS native graphics display for this window.
fn native_display(&self) -> NativeDisplay;
/// Creates a channel to the compositor. The dummy parameter is needed because we don't have
/// UFCS in Rust yet.
///
/// This is part of the windowing system because its implementation often involves OS-specific
    /// magic to wake up the window's event loop.
fn create_compositor_channel(&self)
-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>);
/// Requests that the window system prepare a composite. Typically this will involve making
/// some type of platform-specific graphics context current. Returns true if the composite may
/// proceed and false if it should not.
fn prepare_for_composite(&self, width: usize, height: usize) -> bool;
/// Sets the cursor to be used in the window.
fn set_cursor(&self, cursor: Cursor);
/// Process a key event.
fn handle_key(&self, ch: Option<char>, key: Key, mods: KeyModifiers);
/// Does this window support a clipboard
fn supports_clipboard(&self) -> bool;
/// Add a favicon
fn set_favicon(&self, url: Url);
}
|
/// Events that the windowing system sends to Servo.
#[derive(Clone)]
pub enum WindowEvent {
/// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps
|
random_line_split
|
system.rs
|
use std::sync::RwLock;
use std::path::{Path, PathBuf};
use std::fs;
use slog::Logger;
use search::backends::rocksdb::RocksDBStore;
use uuid::Uuid;
use index::Index;
use index::metadata::IndexMetadata;
use cluster::metadata::ClusterMetadata;
pub struct System {
pub log: Logger,
data_dir: PathBuf,
pub metadata: RwLock<ClusterMetadata>,
}
impl System {
pub fn new(log: Logger, data_dir: PathBuf) -> System {
System {
log: log,
data_dir: data_dir,
metadata: RwLock::new(ClusterMetadata::new()),
}
}
pub fn
|
(&self) -> PathBuf {
let mut dir = self.data_dir.clone();
dir.push("indices");
dir
}
fn load_index(&self, id: Uuid, name: String, path: &Path) -> Result<Index, String> {
let store = RocksDBStore::open(path)?;
// Load metadata
let mut metadata_path = path.to_path_buf();
metadata_path.push("metadata.json");
let metadata = IndexMetadata::load(metadata_path)?;
Ok(Index::new(id, name, metadata, store))
}
pub fn load_indices(&self) {
let indices_dir = self.get_indices_dir();
match fs::read_dir(indices_dir.clone()) {
Ok(files) => {
for file in files {
let path = file.unwrap().path();
if path.is_dir() {
let index_name: String = path.file_name().unwrap().to_str().unwrap().to_owned();
match self.load_index(Uuid::new_v4(), index_name.clone().to_owned(), path.as_path()) {
Ok(index) => {
let mut cluster_metadata = self.metadata.write().unwrap();
let index_ref = cluster_metadata.insert_index(index);
cluster_metadata.names.insert_canonical(index_name.clone(), index_ref).unwrap();
info!(self.log, "loaded index"; "index" => index_name);
}
Err(e) => {
error!(self.log, "load index failed"; "index" => index_name, "error" => e);
}
}
}
}
}
Err(error) => {
error!(self.log, "could not open indices directory"; "dir" => indices_dir.to_str().unwrap(), "error" => format!("{}", error));
}
}
}
}
|
get_indices_dir
|
identifier_name
|
system.rs
|
use std::sync::RwLock;
use std::path::{Path, PathBuf};
use std::fs;
use slog::Logger;
use search::backends::rocksdb::RocksDBStore;
use uuid::Uuid;
use index::Index;
use index::metadata::IndexMetadata;
use cluster::metadata::ClusterMetadata;
pub struct System {
pub log: Logger,
data_dir: PathBuf,
pub metadata: RwLock<ClusterMetadata>,
}
impl System {
pub fn new(log: Logger, data_dir: PathBuf) -> System {
System {
log: log,
data_dir: data_dir,
metadata: RwLock::new(ClusterMetadata::new()),
}
}
pub fn get_indices_dir(&self) -> PathBuf {
let mut dir = self.data_dir.clone();
dir.push("indices");
dir
}
fn load_index(&self, id: Uuid, name: String, path: &Path) -> Result<Index, String> {
let store = RocksDBStore::open(path)?;
// Load metadata
let mut metadata_path = path.to_path_buf();
metadata_path.push("metadata.json");
let metadata = IndexMetadata::load(metadata_path)?;
Ok(Index::new(id, name, metadata, store))
}
pub fn load_indices(&self) {
let indices_dir = self.get_indices_dir();
match fs::read_dir(indices_dir.clone()) {
Ok(files) => {
for file in files {
let path = file.unwrap().path();
if path.is_dir() {
let index_name: String = path.file_name().unwrap().to_str().unwrap().to_owned();
match self.load_index(Uuid::new_v4(), index_name.clone().to_owned(), path.as_path()) {
Ok(index) => {
let mut cluster_metadata = self.metadata.write().unwrap();
let index_ref = cluster_metadata.insert_index(index);
cluster_metadata.names.insert_canonical(index_name.clone(), index_ref).unwrap();
info!(self.log, "loaded index"; "index" => index_name);
}
Err(e) => {
error!(self.log, "load index failed"; "index" => index_name, "error" => e);
}
}
|
}
}
Err(error) => {
error!(self.log, "could not open indices directory"; "dir" => indices_dir.to_str().unwrap(), "error" => format!("{}", error));
}
}
}
}
|
}
|
random_line_split
|
system.rs
|
use std::sync::RwLock;
use std::path::{Path, PathBuf};
use std::fs;
use slog::Logger;
use search::backends::rocksdb::RocksDBStore;
use uuid::Uuid;
use index::Index;
use index::metadata::IndexMetadata;
use cluster::metadata::ClusterMetadata;
pub struct System {
pub log: Logger,
data_dir: PathBuf,
pub metadata: RwLock<ClusterMetadata>,
}
impl System {
pub fn new(log: Logger, data_dir: PathBuf) -> System {
System {
log: log,
data_dir: data_dir,
metadata: RwLock::new(ClusterMetadata::new()),
}
}
pub fn get_indices_dir(&self) -> PathBuf {
let mut dir = self.data_dir.clone();
dir.push("indices");
dir
}
fn load_index(&self, id: Uuid, name: String, path: &Path) -> Result<Index, String> {
let store = RocksDBStore::open(path)?;
// Load metadata
let mut metadata_path = path.to_path_buf();
metadata_path.push("metadata.json");
let metadata = IndexMetadata::load(metadata_path)?;
Ok(Index::new(id, name, metadata, store))
}
pub fn load_indices(&self) {
let indices_dir = self.get_indices_dir();
match fs::read_dir(indices_dir.clone()) {
Ok(files) => {
for file in files {
let path = file.unwrap().path();
if path.is_dir() {
let index_name: String = path.file_name().unwrap().to_str().unwrap().to_owned();
match self.load_index(Uuid::new_v4(), index_name.clone().to_owned(), path.as_path()) {
Ok(index) => {
let mut cluster_metadata = self.metadata.write().unwrap();
let index_ref = cluster_metadata.insert_index(index);
cluster_metadata.names.insert_canonical(index_name.clone(), index_ref).unwrap();
info!(self.log, "loaded index"; "index" => index_name);
}
Err(e) =>
|
}
}
}
}
Err(error) => {
error!(self.log, "could not open indices directory"; "dir" => indices_dir.to_str().unwrap(), "error" => format!("{}", error));
}
}
}
}
|
{
error!(self.log, "load index failed"; "index" => index_name, "error" => e);
}
|
conditional_block
|
system.rs
|
use std::sync::RwLock;
use std::path::{Path, PathBuf};
use std::fs;
use slog::Logger;
use search::backends::rocksdb::RocksDBStore;
use uuid::Uuid;
use index::Index;
use index::metadata::IndexMetadata;
use cluster::metadata::ClusterMetadata;
pub struct System {
pub log: Logger,
data_dir: PathBuf,
pub metadata: RwLock<ClusterMetadata>,
}
impl System {
pub fn new(log: Logger, data_dir: PathBuf) -> System
|
pub fn get_indices_dir(&self) -> PathBuf {
let mut dir = self.data_dir.clone();
dir.push("indices");
dir
}
fn load_index(&self, id: Uuid, name: String, path: &Path) -> Result<Index, String> {
let store = RocksDBStore::open(path)?;
// Load metadata
let mut metadata_path = path.to_path_buf();
metadata_path.push("metadata.json");
let metadata = IndexMetadata::load(metadata_path)?;
Ok(Index::new(id, name, metadata, store))
}
pub fn load_indices(&self) {
let indices_dir = self.get_indices_dir();
match fs::read_dir(indices_dir.clone()) {
Ok(files) => {
for file in files {
let path = file.unwrap().path();
if path.is_dir() {
let index_name: String = path.file_name().unwrap().to_str().unwrap().to_owned();
match self.load_index(Uuid::new_v4(), index_name.clone().to_owned(), path.as_path()) {
Ok(index) => {
let mut cluster_metadata = self.metadata.write().unwrap();
let index_ref = cluster_metadata.insert_index(index);
cluster_metadata.names.insert_canonical(index_name.clone(), index_ref).unwrap();
info!(self.log, "loaded index"; "index" => index_name);
}
Err(e) => {
error!(self.log, "load index failed"; "index" => index_name, "error" => e);
}
}
}
}
}
Err(error) => {
error!(self.log, "could not open indices directory"; "dir" => indices_dir.to_str().unwrap(), "error" => format!("{}", error));
}
}
}
}
|
{
System {
log: log,
data_dir: data_dir,
metadata: RwLock::new(ClusterMetadata::new()),
}
}
|
identifier_body
|
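The `load_indices` bodies above walk a directory and treat every subdirectory as an index. A minimal sketch of that directory walk using only `std::fs` follows; the `RocksDBStore`, `slog`, and `uuid` dependencies from the record are not assumed here, so `load_index` is a stand-in that just returns the directory name, and the `data/indices` path is made up.

use std::fs;
use std::path::Path;

// Stand-in for the record's load_index, which opens a RocksDB store and
// reads metadata.json; here it just returns the directory name.
fn load_index(path: &Path) -> Result<String, String> {
    path.file_name()
        .and_then(|n| n.to_str())
        .map(|n| n.to_owned())
        .ok_or_else(|| "invalid directory name".to_owned())
}

fn load_indices(indices_dir: &Path) -> Vec<String> {
    let mut loaded = Vec::new();
    match fs::read_dir(indices_dir) {
        Ok(entries) => {
            // Every subdirectory of the indices directory is one index.
            for entry in entries.flatten() {
                let path = entry.path();
                if path.is_dir() {
                    match load_index(&path) {
                        Ok(name) => {
                            println!("loaded index: {}", name);
                            loaded.push(name);
                        }
                        Err(e) => eprintln!("load index failed: {}", e),
                    }
                }
            }
        }
        Err(e) => eprintln!("could not open indices directory: {}", e),
    }
    loaded
}

fn main() {
    let _ = load_indices(Path::new("data/indices"));
}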
macros-nested.rs
|
// This test is similar to `macros.rs`, but nested in modules.
// run-pass
// edition:2018
|
#![allow(non_camel_case_types)]
mod foo {
// Test that ambiguity errors are not emitted between `self::test` and
// `::test`, assuming the latter (crate) is not in `extern_prelude`.
macro_rules! m1 {
() => {
mod test {
pub struct Foo(pub ());
}
}
}
pub use test::Foo;
m1!();
// Test that qualified paths can refer to both the external crate and local item.
macro_rules! m2 {
() => {
mod std {
pub struct io(pub ());
}
}
}
pub use ::std::io as std_io;
pub use self::std::io as local_io;
m2!();
}
// Test that we can refer to the external crate unqualified
// (when there isn't a local item with the same name).
use std::io;
mod bar {
// Also test the unqualified external crate import in a nested module,
// to show that the above import doesn't resolve through a local `std`
// item, e.g., the automatically injected `extern crate std;`, which in
// the Rust 2018 should no longer be visible through `crate::std`.
pub use std::io;
}
fn main() {
foo::Foo(());
let _ = foo::std_io::stdout();
foo::local_io(());
let _ = io::stdout();
let _ = bar::io::stdout();
}
|
random_line_split
|
|
macros-nested.rs
|
// This test is similar to `macros.rs`, but nested in modules.
// run-pass
// edition:2018
#![allow(non_camel_case_types)]
mod foo {
// Test that ambiguity errors are not emitted between `self::test` and
// `::test`, assuming the latter (crate) is not in `extern_prelude`.
macro_rules! m1 {
() => {
mod test {
pub struct Foo(pub ());
}
}
}
pub use test::Foo;
m1!();
// Test that qualified paths can refer to both the external crate and local item.
macro_rules! m2 {
() => {
mod std {
pub struct io(pub ());
}
}
}
pub use ::std::io as std_io;
pub use self::std::io as local_io;
m2!();
}
// Test that we can refer to the external crate unqualified
// (when there isn't a local item with the same name).
use std::io;
mod bar {
// Also test the unqualified external crate import in a nested module,
// to show that the above import doesn't resolve through a local `std`
// item, e.g., the automatically injected `extern crate std;`, which in
// the Rust 2018 should no longer be visible through `crate::std`.
pub use std::io;
}
fn main()
|
{
foo::Foo(());
let _ = foo::std_io::stdout();
foo::local_io(());
let _ = io::stdout();
let _ = bar::io::stdout();
}
|
identifier_body
|
|
macros-nested.rs
|
// This test is similar to `macros.rs`, but nested in modules.
// run-pass
// edition:2018
#![allow(non_camel_case_types)]
mod foo {
// Test that ambiguity errors are not emitted between `self::test` and
// `::test`, assuming the latter (crate) is not in `extern_prelude`.
macro_rules! m1 {
() => {
mod test {
pub struct Foo(pub ());
}
}
}
pub use test::Foo;
m1!();
// Test that qualified paths can refer to both the external crate and local item.
macro_rules! m2 {
() => {
mod std {
pub struct io(pub ());
}
}
}
pub use ::std::io as std_io;
pub use self::std::io as local_io;
m2!();
}
// Test that we can refer to the external crate unqualified
// (when there isn't a local item with the same name).
use std::io;
mod bar {
// Also test the unqualified external crate import in a nested module,
// to show that the above import doesn't resolve through a local `std`
// item, e.g., the automatically injected `extern crate std;`, which in
// the Rust 2018 should no longer be visible through `crate::std`.
pub use std::io;
}
fn
|
() {
foo::Foo(());
let _ = foo::std_io::stdout();
foo::local_io(());
let _ = io::stdout();
let _ = bar::io::stdout();
}
|
main
|
identifier_name
|
textdecoder.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding;
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding::{
TextDecodeOptions, TextDecoderMethods,
};
use crate::dom::bindings::codegen::UnionTypes::ArrayBufferViewOrArrayBuffer;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use encoding_rs::{Decoder, DecoderResult, Encoding};
use std::borrow::ToOwned;
use std::cell::{Cell, RefCell};
#[dom_struct]
#[allow(non_snake_case)]
pub struct TextDecoder {
reflector_: Reflector,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
#[ignore_malloc_size_of = "defined in encoding_rs"]
decoder: RefCell<Decoder>,
in_stream: RefCell<Vec<u8>>,
do_not_flush: Cell<bool>,
}
#[allow(non_snake_case)]
impl TextDecoder {
fn new_inherited(encoding: &'static Encoding, fatal: bool, ignoreBOM: bool) -> TextDecoder {
TextDecoder {
reflector_: Reflector::new(),
encoding: encoding,
fatal: fatal,
ignoreBOM: ignoreBOM,
decoder: RefCell::new(if ignoreBOM {
encoding.new_decoder()
} else {
encoding.new_decoder_without_bom_handling()
}),
in_stream: RefCell::new(Vec::new()),
do_not_flush: Cell::new(false),
}
}
fn make_range_error() -> Fallible<DomRoot<TextDecoder>> {
Err(Error::Range(
"The given encoding is not supported.".to_owned(),
))
}
pub fn new(
global: &GlobalScope,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
) -> DomRoot<TextDecoder> {
reflect_dom_object(
Box::new(TextDecoder::new_inherited(encoding, fatal, ignoreBOM)),
global,
TextDecoderBinding::Wrap,
)
}
/// <https://encoding.spec.whatwg.org/#dom-textdecoder>
pub fn Constructor(
global: &GlobalScope,
label: DOMString,
options: &TextDecoderBinding::TextDecoderOptions,
) -> Fallible<DomRoot<TextDecoder>> {
let encoding = match Encoding::for_label_no_replacement(label.as_bytes()) {
None => return TextDecoder::make_range_error(),
Some(enc) => enc,
};
Ok(TextDecoder::new(
global,
encoding,
options.fatal,
options.ignoreBOM,
))
}
}
impl TextDecoderMethods for TextDecoder {
// https://encoding.spec.whatwg.org/#dom-textdecoder-encoding
fn Encoding(&self) -> DOMString {
DOMString::from(self.encoding.name().to_ascii_lowercase())
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-fatal
fn Fatal(&self) -> bool
|
// https://encoding.spec.whatwg.org/#dom-textdecoder-ignorebom
fn IgnoreBOM(&self) -> bool {
self.ignoreBOM
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-decode
fn Decode(
&self,
input: Option<ArrayBufferViewOrArrayBuffer>,
options: &TextDecodeOptions,
) -> Fallible<USVString> {
// Step 1.
        if !self.do_not_flush.get() {
if self.ignoreBOM {
self.decoder
.replace(self.encoding.new_decoder_without_bom_handling());
} else {
self.decoder.replace(self.encoding.new_decoder());
}
self.in_stream.replace(Vec::new());
}
// Step 2.
self.do_not_flush.set(options.stream);
// Step 3.
match input {
Some(ArrayBufferViewOrArrayBuffer::ArrayBufferView(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
Some(ArrayBufferViewOrArrayBuffer::ArrayBuffer(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
None => {},
};
let mut decoder = self.decoder.borrow_mut();
let (remaining, s) = {
let mut in_stream = self.in_stream.borrow_mut();
let (remaining, s) = if self.fatal {
// Step 4.
let mut out_stream = String::with_capacity(
decoder
.max_utf8_buffer_length_without_replacement(in_stream.len())
.unwrap(),
);
// Step 5: Implemented by encoding_rs::Decoder.
match decoder.decode_to_string_without_replacement(
&in_stream,
&mut out_stream,
!options.stream,
) {
(DecoderResult::InputEmpty, read) => (in_stream.split_off(read), out_stream),
// Step 5.3.3.
_ => return Err(Error::Type("Decoding failed".to_owned())),
}
} else {
// Step 4.
let mut out_stream =
String::with_capacity(decoder.max_utf8_buffer_length(in_stream.len()).unwrap());
// Step 5: Implemented by encoding_rs::Decoder.
let (_result, read, _replaced) =
                    decoder.decode_to_string(&in_stream, &mut out_stream, !options.stream);
(in_stream.split_off(read), out_stream)
};
(remaining, s)
};
self.in_stream.replace(remaining);
Ok(USVString(s))
}
}
|
{
self.fatal
}
|
identifier_body
|
textdecoder.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding;
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding::{
TextDecodeOptions, TextDecoderMethods,
};
use crate::dom::bindings::codegen::UnionTypes::ArrayBufferViewOrArrayBuffer;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use encoding_rs::{Decoder, DecoderResult, Encoding};
use std::borrow::ToOwned;
use std::cell::{Cell, RefCell};
#[dom_struct]
#[allow(non_snake_case)]
pub struct TextDecoder {
reflector_: Reflector,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
#[ignore_malloc_size_of = "defined in encoding_rs"]
decoder: RefCell<Decoder>,
in_stream: RefCell<Vec<u8>>,
do_not_flush: Cell<bool>,
}
#[allow(non_snake_case)]
impl TextDecoder {
fn new_inherited(encoding: &'static Encoding, fatal: bool, ignoreBOM: bool) -> TextDecoder {
TextDecoder {
reflector_: Reflector::new(),
encoding: encoding,
fatal: fatal,
ignoreBOM: ignoreBOM,
decoder: RefCell::new(if ignoreBOM {
encoding.new_decoder()
} else {
encoding.new_decoder_without_bom_handling()
}),
in_stream: RefCell::new(Vec::new()),
do_not_flush: Cell::new(false),
}
}
fn make_range_error() -> Fallible<DomRoot<TextDecoder>> {
Err(Error::Range(
"The given encoding is not supported.".to_owned(),
))
}
pub fn new(
global: &GlobalScope,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
) -> DomRoot<TextDecoder> {
reflect_dom_object(
Box::new(TextDecoder::new_inherited(encoding, fatal, ignoreBOM)),
global,
TextDecoderBinding::Wrap,
)
}
/// <https://encoding.spec.whatwg.org/#dom-textdecoder>
pub fn Constructor(
global: &GlobalScope,
label: DOMString,
options: &TextDecoderBinding::TextDecoderOptions,
) -> Fallible<DomRoot<TextDecoder>> {
let encoding = match Encoding::for_label_no_replacement(label.as_bytes()) {
None => return TextDecoder::make_range_error(),
Some(enc) => enc,
};
Ok(TextDecoder::new(
global,
encoding,
options.fatal,
options.ignoreBOM,
))
}
}
impl TextDecoderMethods for TextDecoder {
// https://encoding.spec.whatwg.org/#dom-textdecoder-encoding
fn Encoding(&self) -> DOMString {
DOMString::from(self.encoding.name().to_ascii_lowercase())
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-fatal
fn Fatal(&self) -> bool {
self.fatal
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-ignorebom
fn IgnoreBOM(&self) -> bool {
self.ignoreBOM
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-decode
fn
|
(
&self,
input: Option<ArrayBufferViewOrArrayBuffer>,
options: &TextDecodeOptions,
) -> Fallible<USVString> {
// Step 1.
        if !self.do_not_flush.get() {
if self.ignoreBOM {
self.decoder
.replace(self.encoding.new_decoder_without_bom_handling());
} else {
self.decoder.replace(self.encoding.new_decoder());
}
self.in_stream.replace(Vec::new());
}
// Step 2.
self.do_not_flush.set(options.stream);
// Step 3.
match input {
Some(ArrayBufferViewOrArrayBuffer::ArrayBufferView(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
Some(ArrayBufferViewOrArrayBuffer::ArrayBuffer(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
None => {},
};
let mut decoder = self.decoder.borrow_mut();
let (remaining, s) = {
let mut in_stream = self.in_stream.borrow_mut();
let (remaining, s) = if self.fatal {
// Step 4.
let mut out_stream = String::with_capacity(
decoder
.max_utf8_buffer_length_without_replacement(in_stream.len())
.unwrap(),
);
// Step 5: Implemented by encoding_rs::Decoder.
match decoder.decode_to_string_without_replacement(
&in_stream,
&mut out_stream,
!options.stream,
) {
(DecoderResult::InputEmpty, read) => (in_stream.split_off(read), out_stream),
// Step 5.3.3.
_ => return Err(Error::Type("Decoding failed".to_owned())),
}
} else {
// Step 4.
let mut out_stream =
String::with_capacity(decoder.max_utf8_buffer_length(in_stream.len()).unwrap());
// Step 5: Implemented by encoding_rs::Decoder.
let (_result, read, _replaced) =
                    decoder.decode_to_string(&in_stream, &mut out_stream, !options.stream);
(in_stream.split_off(read), out_stream)
};
(remaining, s)
};
self.in_stream.replace(remaining);
Ok(USVString(s))
}
}
|
Decode
|
identifier_name
|
textdecoder.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding;
use crate::dom::bindings::codegen::Bindings::TextDecoderBinding::{
TextDecodeOptions, TextDecoderMethods,
};
use crate::dom::bindings::codegen::UnionTypes::ArrayBufferViewOrArrayBuffer;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use encoding_rs::{Decoder, DecoderResult, Encoding};
use std::borrow::ToOwned;
use std::cell::{Cell, RefCell};
#[dom_struct]
#[allow(non_snake_case)]
pub struct TextDecoder {
reflector_: Reflector,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
#[ignore_malloc_size_of = "defined in encoding_rs"]
decoder: RefCell<Decoder>,
in_stream: RefCell<Vec<u8>>,
do_not_flush: Cell<bool>,
}
#[allow(non_snake_case)]
impl TextDecoder {
fn new_inherited(encoding: &'static Encoding, fatal: bool, ignoreBOM: bool) -> TextDecoder {
TextDecoder {
reflector_: Reflector::new(),
encoding: encoding,
fatal: fatal,
ignoreBOM: ignoreBOM,
decoder: RefCell::new(if ignoreBOM {
encoding.new_decoder()
} else {
encoding.new_decoder_without_bom_handling()
}),
in_stream: RefCell::new(Vec::new()),
do_not_flush: Cell::new(false),
}
}
fn make_range_error() -> Fallible<DomRoot<TextDecoder>> {
Err(Error::Range(
"The given encoding is not supported.".to_owned(),
))
}
pub fn new(
global: &GlobalScope,
encoding: &'static Encoding,
fatal: bool,
ignoreBOM: bool,
) -> DomRoot<TextDecoder> {
reflect_dom_object(
Box::new(TextDecoder::new_inherited(encoding, fatal, ignoreBOM)),
global,
TextDecoderBinding::Wrap,
)
}
/// <https://encoding.spec.whatwg.org/#dom-textdecoder>
pub fn Constructor(
global: &GlobalScope,
label: DOMString,
options: &TextDecoderBinding::TextDecoderOptions,
) -> Fallible<DomRoot<TextDecoder>> {
let encoding = match Encoding::for_label_no_replacement(label.as_bytes()) {
None => return TextDecoder::make_range_error(),
Some(enc) => enc,
};
Ok(TextDecoder::new(
global,
encoding,
options.fatal,
options.ignoreBOM,
))
}
}
impl TextDecoderMethods for TextDecoder {
// https://encoding.spec.whatwg.org/#dom-textdecoder-encoding
fn Encoding(&self) -> DOMString {
DOMString::from(self.encoding.name().to_ascii_lowercase())
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-fatal
fn Fatal(&self) -> bool {
self.fatal
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-ignorebom
fn IgnoreBOM(&self) -> bool {
self.ignoreBOM
}
// https://encoding.spec.whatwg.org/#dom-textdecoder-decode
fn Decode(
&self,
input: Option<ArrayBufferViewOrArrayBuffer>,
options: &TextDecodeOptions,
) -> Fallible<USVString> {
// Step 1.
        if !self.do_not_flush.get() {
if self.ignoreBOM {
self.decoder
.replace(self.encoding.new_decoder_without_bom_handling());
} else {
self.decoder.replace(self.encoding.new_decoder());
}
self.in_stream.replace(Vec::new());
}
// Step 2.
self.do_not_flush.set(options.stream);
// Step 3.
match input {
Some(ArrayBufferViewOrArrayBuffer::ArrayBufferView(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
Some(ArrayBufferViewOrArrayBuffer::ArrayBuffer(ref a)) => {
self.in_stream.borrow_mut().extend_from_slice(&a.to_vec());
},
None => {},
};
let mut decoder = self.decoder.borrow_mut();
let (remaining, s) = {
let mut in_stream = self.in_stream.borrow_mut();
|
// Step 4.
let mut out_stream = String::with_capacity(
decoder
.max_utf8_buffer_length_without_replacement(in_stream.len())
.unwrap(),
);
// Step 5: Implemented by encoding_rs::Decoder.
match decoder.decode_to_string_without_replacement(
&in_stream,
&mut out_stream,
!options.stream,
) {
(DecoderResult::InputEmpty, read) => (in_stream.split_off(read), out_stream),
// Step 5.3.3.
_ => return Err(Error::Type("Decoding failed".to_owned())),
}
} else {
// Step 4.
let mut out_stream =
String::with_capacity(decoder.max_utf8_buffer_length(in_stream.len()).unwrap());
// Step 5: Implemented by encoding_rs::Decoder.
let (_result, read, _replaced) =
                    decoder.decode_to_string(&in_stream, &mut out_stream, !options.stream);
(in_stream.split_off(read), out_stream)
};
(remaining, s)
};
self.in_stream.replace(remaining);
Ok(USVString(s))
}
}
|
let (remaining, s) = if self.fatal {
|
random_line_split
|
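The `Decode` method in the records above drives `encoding_rs::Decoder` in streaming mode, sizing the output with `max_utf8_buffer_length` and flushing on the final call. A standalone sketch of that non-fatal path, assuming the `encoding_rs` crate as a dependency:

use encoding_rs::UTF_8;

fn main() {
    // "é" (0xC3 0xA9) is deliberately split across the chunks to show that
    // the decoder buffers incomplete sequences between calls.
    let chunks: [&[u8]; 2] = [b"caf\xC3", b"\xA9 latte"];
    let mut decoder = UTF_8.new_decoder();
    let mut output = String::new();

    for (i, &chunk) in chunks.iter().enumerate() {
        let last = i == chunks.len() - 1;
        // Reserve the worst-case output size, as the record does.
        let mut buf =
            String::with_capacity(decoder.max_utf8_buffer_length(chunk.len()).unwrap());
        // `last == true` flushes whatever is still buffered in the decoder.
        let (_result, _read, _had_replacements) =
            decoder.decode_to_string(chunk, &mut buf, last);
        output.push_str(&buf);
    }
    assert_eq!(output, "café latte");
}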
no_0023_merge_k_sorted_lists.rs
|
// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
pub val: i32,
pub next: Option<Box<ListNode>>,
}
impl ListNode {
#[inline]
fn new(val: i32) -> Self {
ListNode { next: None, val }
}
}
use std::cmp::{Ord, Ordering, PartialEq};
use std::collections::BinaryHeap;
impl Ord for ListNode {
fn cmp(&self, other: &Self) -> Ordering {
        // BinaryHeap is a max-heap by default; reverse the comparison here to get a min-heap.
other.val.cmp(&self.val)
}
}
impl PartialOrd for ListNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
struct Solution;
impl Solution {
pub fn merge_k_lists(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
let mut ans = Box::new(ListNode::new(0));
let mut ptr = &mut ans;
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
if let Some(next) = node.next.take() {
heap.push(next);
}
ptr.next = Some(node);
ptr = ptr.next.as_mut().unwrap();
}
ans.next
}
    // This version builds the final result with a Vec, because it is not obvious how to append to the end of the linked list.
pub fn merge_k_lists1(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
        // Buffer the result in a Vec.
let mut ans = Vec::new();
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
println!("弹出:{}", node.val);
if let Some(next) = node.next.take() {
heap.push(next);
}
ans.push(node);
}
        // Convert the Vec back into a linked list.
let mut root = None;
for mut node in ans.into_iter().rev() {
node.next = root;
root = Some(node);
}
root
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_k_lists() {
|
node(ListNode {
val: 1,
next: node(ListNode {
val: 4,
next: node(ListNode::new(5)),
}),
}),
node(ListNode {
val: 1,
next: node(ListNode {
val: 3,
next: node(ListNode::new(4)),
}),
}),
node(ListNode {
val: 2,
next: node(ListNode { val: 6, next: None }),
}),
];
let ans = Solution::merge_k_lists(lists);
let want = node(ListNode {
val: 1,
next: node(ListNode {
val: 1,
next: node(ListNode {
val: 2,
next: node(ListNode {
val: 3,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 5,
next: node(ListNode::new(6)),
}),
}),
}),
}),
}),
}),
});
assert_eq!(ans, want);
}
fn node(n: ListNode) -> Option<Box<ListNode>> {
Some(Box::new(n))
}
}
|
let lists = vec![
|
random_line_split
|
no_0023_merge_k_sorted_lists.rs
|
// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
pub val: i32,
pub next: Option<Box<ListNode>>,
}
impl ListNode {
#[inline]
fn new(val: i32) -> Self {
ListNode { next: None, val }
}
}
use std::cmp::{Ord, Ordering, PartialEq};
use std::collections::BinaryHeap;
impl Ord for ListNode {
fn cmp(&self, other: &Self) -> Ordering {
        // BinaryHeap is a max-heap by default; reverse the comparison here to get a min-heap.
other.val.cmp(&self.val)
}
}
impl PartialOrd for ListNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
struct Solution;
impl Solution {
pub fn merge_k_lists(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
let mut ans = Box::new(ListNode::new(0));
let mut ptr = &mut ans;
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
if let Some(next) = node.next.take() {
heap.push(next);
}
ptr.next = Some(node);
ptr = pt
|
.next
}
    // This version builds the final result with a Vec, because it is not obvious how to append to the end of the linked list.
pub fn merge_k_lists1(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
        // Buffer the result in a Vec.
let mut ans = Vec::new();
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
println!("弹出:{}", node.val);
if let Some(next) = node.next.take() {
heap.push(next);
}
ans.push(node);
}
        // Convert the Vec back into a linked list.
let mut root = None;
for mut node in ans.into_iter().rev() {
node.next = root;
root = Some(node);
}
root
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_k_lists() {
let lists = vec![
node(ListNode {
val: 1,
next: node(ListNode {
val: 4,
next: node(ListNode::new(5)),
}),
}),
node(ListNode {
val: 1,
next: node(ListNode {
val: 3,
next: node(ListNode::new(4)),
}),
}),
node(ListNode {
val: 2,
next: node(ListNode { val: 6, next: None }),
}),
];
let ans = Solution::merge_k_lists(lists);
let want = node(ListNode {
val: 1,
next: node(ListNode {
val: 1,
next: node(ListNode {
val: 2,
next: node(ListNode {
val: 3,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 5,
next: node(ListNode::new(6)),
}),
}),
}),
}),
}),
}),
});
assert_eq!(ans, want);
}
fn node(n: ListNode) -> Option<Box<ListNode>> {
Some(Box::new(n))
}
}
|
r.next.as_mut().unwrap();
}
ans
|
conditional_block
|
no_0023_merge_k_sorted_lists.rs
|
// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
pub val: i32,
pub next: Option<Box<ListNode>>,
}
impl ListNode {
#[inline]
fn
|
(val: i32) -> Self {
ListNode { next: None, val }
}
}
use std::cmp::{Ord, Ordering, PartialEq};
use std::collections::BinaryHeap;
impl Ord for ListNode {
fn cmp(&self, other: &Self) -> Ordering {
        // BinaryHeap is a max-heap by default; reverse the comparison here to get a min-heap.
other.val.cmp(&self.val)
}
}
impl PartialOrd for ListNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
struct Solution;
impl Solution {
pub fn merge_k_lists(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
let mut ans = Box::new(ListNode::new(0));
let mut ptr = &mut ans;
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
if let Some(next) = node.next.take() {
heap.push(next);
}
ptr.next = Some(node);
ptr = ptr.next.as_mut().unwrap();
}
ans.next
}
    // This version builds the final result with a Vec, because it is not obvious how to append to the end of the linked list.
pub fn merge_k_lists1(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
        // Buffer the result in a Vec.
let mut ans = Vec::new();
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
println!("弹出:{}", node.val);
if let Some(next) = node.next.take() {
heap.push(next);
}
ans.push(node);
}
        // Convert the Vec back into a linked list.
let mut root = None;
for mut node in ans.into_iter().rev() {
node.next = root;
root = Some(node);
}
root
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_k_lists() {
let lists = vec![
node(ListNode {
val: 1,
next: node(ListNode {
val: 4,
next: node(ListNode::new(5)),
}),
}),
node(ListNode {
val: 1,
next: node(ListNode {
val: 3,
next: node(ListNode::new(4)),
}),
}),
node(ListNode {
val: 2,
next: node(ListNode { val: 6, next: None }),
}),
];
let ans = Solution::merge_k_lists(lists);
let want = node(ListNode {
val: 1,
next: node(ListNode {
val: 1,
next: node(ListNode {
val: 2,
next: node(ListNode {
val: 3,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 5,
next: node(ListNode::new(6)),
}),
}),
}),
}),
}),
}),
});
assert_eq!(ans, want);
}
fn node(n: ListNode) -> Option<Box<ListNode>> {
Some(Box::new(n))
}
}
|
new
|
identifier_name
|
no_0023_merge_k_sorted_lists.rs
|
// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
pub val: i32,
pub next: Option<Box<ListNode>>,
}
impl ListNode {
#[inline]
fn new(val: i32) -> Self {
ListNode { next: None, val }
}
}
use std::cmp::{Ord, Ordering, PartialEq};
use std::collections::BinaryHeap;
impl Ord for ListNode {
fn cmp(&self, other: &Self) -> Ordering
|
n partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
struct Solution;
impl Solution {
pub fn merge_k_lists(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
let mut ans = Box::new(ListNode::new(0));
let mut ptr = &mut ans;
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
if let Some(next) = node.next.take() {
heap.push(next);
}
ptr.next = Some(node);
ptr = ptr.next.as_mut().unwrap();
}
ans.next
}
    // This version builds the final result with a Vec, because it is not obvious how to append to the end of the linked list.
pub fn merge_k_lists1(lists: Vec<Option<Box<ListNode>>>) -> Option<Box<ListNode>> {
if lists.is_empty() {
return None;
}
        // Buffer the result in a Vec.
let mut ans = Vec::new();
let mut heap = BinaryHeap::new();
        // Push the head node of each list onto the heap.
for node in lists {
if let Some(n) = node {
heap.push(n);
}
}
        // Pop the smallest node, then push its remaining tail back onto the heap.
while let Some(mut node) = heap.pop() {
println!("弹出:{}", node.val);
if let Some(next) = node.next.take() {
heap.push(next);
}
ans.push(node);
}
        // Convert the Vec back into a linked list.
let mut root = None;
for mut node in ans.into_iter().rev() {
node.next = root;
root = Some(node);
}
root
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_k_lists() {
let lists = vec![
node(ListNode {
val: 1,
next: node(ListNode {
val: 4,
next: node(ListNode::new(5)),
}),
}),
node(ListNode {
val: 1,
next: node(ListNode {
val: 3,
next: node(ListNode::new(4)),
}),
}),
node(ListNode {
val: 2,
next: node(ListNode { val: 6, next: None }),
}),
];
let ans = Solution::merge_k_lists(lists);
let want = node(ListNode {
val: 1,
next: node(ListNode {
val: 1,
next: node(ListNode {
val: 2,
next: node(ListNode {
val: 3,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 4,
next: node(ListNode {
val: 5,
next: node(ListNode::new(6)),
}),
}),
}),
}),
}),
}),
});
assert_eq!(ans, want);
}
fn node(n: ListNode) -> Option<Box<ListNode>> {
Some(Box::new(n))
}
}
|
{
        // BinaryHeap is a max-heap by default; reverse the comparison here to get a min-heap.
other.val.cmp(&self.val)
}
}
impl PartialOrd for ListNode {
f
|
identifier_body
|
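The records above turn `BinaryHeap` into a min-heap by reversing `Ord` on `ListNode`. An alternative sketch, shown here on plain integers, wraps each pushed value in `std::cmp::Reverse` instead, which avoids giving the node type an inverted ordering.

use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // BinaryHeap is a max-heap; wrapping each value in Reverse flips the
    // ordering, so pop() returns the smallest remaining element.
    let mut heap = BinaryHeap::new();
    for &v in [5, 1, 4, 1, 3, 2, 6].iter() {
        heap.push(Reverse(v));
    }

    let mut sorted = Vec::new();
    while let Some(Reverse(v)) = heap.pop() {
        sorted.push(v);
    }
    assert_eq!(sorted, vec![1, 1, 2, 3, 4, 5, 6]);
}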
autoderef-method-priority.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
use std::gc::{GC, Gc};
trait double {
fn double(self) -> uint;
}
impl double for uint {
fn double(self) -> uint { self }
}
impl double for Gc<uint> {
fn double(self) -> uint { *self * 2u }
}
pub fn
|
() {
let x = box(GC) 3u;
assert_eq!(x.double(), 6u);
}
|
main
|
identifier_name
|
autoderef-method-priority.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
use std::gc::{GC, Gc};
trait double {
fn double(self) -> uint;
}
|
impl double for uint {
fn double(self) -> uint { self }
}
impl double for Gc<uint> {
fn double(self) -> uint { *self * 2u }
}
pub fn main() {
let x = box(GC) 3u;
assert_eq!(x.double(), 6u);
}
|
random_line_split
|
|
autoderef-method-priority.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
use std::gc::{GC, Gc};
trait double {
fn double(self) -> uint;
}
impl double for uint {
fn double(self) -> uint
|
}
impl double for Gc<uint> {
fn double(self) -> uint { *self * 2u }
}
pub fn main() {
let x = box(GC) 3u;
assert_eq!(x.double(), 6u);
}
|
{ self }
|
identifier_body
|
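The autoderef-method-priority.rs records above use the long-removed `Gc`/`box(GC)` managed boxes and the `uint` type. A rough modern sketch of the same method-resolution behaviour, with `Rc<usize>` assumed as a stand-in for `Gc<uint>` and the trait taking `&self`:

use std::rc::Rc;

trait Double {
    fn double(&self) -> usize;
}

impl Double for usize {
    fn double(&self) -> usize {
        *self
    }
}

impl Double for Rc<usize> {
    fn double(&self) -> usize {
        // Dereference the Rc to reach the inner value, then double it.
        **self * 2
    }
}

fn main() {
    // Method resolution prefers the impl on Rc<usize> over auto-dereferencing
    // down to the impl on usize, mirroring the Gc<uint> test above.
    let x = Rc::new(3usize);
    assert_eq!(x.double(), 6);
}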
wmap.rs
|
#[macro_use]
extern crate cli_util;
extern crate wlib;
use std::env;
use wlib::window;
|
#[derive(Copy, Clone)]
enum Mode {
Map,
Unmap
}
fn main() {
let name = cli_util::name(&mut env::args());
parse_args!{
description: "map or unmap window",
flag mode: Mode = Mode::Map,
(&["-m", "--map"], Mode::Map, "map window (default)"),
(&["-u", "--unmap"], Mode::Unmap, "unmap window"),
arg wid: window::ID,
("wid", "window id")
}
cli_util::handle_error(&name, 1, run(mode, wid));
}
fn run(mode: Mode, wid: window::ID) -> Result<(), &'static str> {
let disp = try!(wlib::Display::open());
let mut win = try!(
disp.window(wid).map_err(|_| "window does not exist")
);
match mode {
Mode::Map => try!(win.map()),
Mode::Unmap => try!(win.unmap())
}
Ok(())
}
|
random_line_split
|
|
wmap.rs
|
#[macro_use]
extern crate cli_util;
extern crate wlib;
use std::env;
use wlib::window;
#[derive(Copy, Clone)]
enum Mode {
Map,
Unmap
}
fn main() {
let name = cli_util::name(&mut env::args());
parse_args!{
description: "map or unmap window",
flag mode: Mode = Mode::Map,
(&["-m", "--map"], Mode::Map, "map window (default)"),
(&["-u", "--unmap"], Mode::Unmap, "unmap window"),
arg wid: window::ID,
("wid", "window id")
}
cli_util::handle_error(&name, 1, run(mode, wid));
}
fn
|
(mode: Mode, wid: window::ID) -> Result<(), &'static str> {
let disp = try!(wlib::Display::open());
let mut win = try!(
disp.window(wid).map_err(|_| "window does not exist")
);
match mode {
Mode::Map => try!(win.map()),
Mode::Unmap => try!(win.unmap())
}
Ok(())
}
|
run
|
identifier_name
|
pin.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*!
Pin configuration.
Some pins that could be configured here may be missing from actual MCU depending
on the package.
*/
use core::intrinsics::abort;
use core::option::Option;
use core::marker::Copy;
use self::Port::*;
#[path="../../util/ioreg.rs"]
#[macro_use] mod ioreg;
/// Available port names.
#[allow(missing_docs)]
#[derive(Copy)]
pub enum Port {
Port0,
Port1,
Port2,
Port3,
|
/// Pin functions (GPIO or up to three additional functions).
#[derive(PartialEq)]
#[allow(missing_docs)]
pub enum Function {
Gpio = 0,
AltFunction1 = 1,
AltFunction2 = 2,
AltFunction3 = 3,
}
impl Copy for Function {}
/// Structure to describe the location of a pin
#[derive(Copy)]
pub struct Pin {
/// Port the pin is attached to
port: Port,
/// Pin number in the port
pin: u8
}
impl Pin {
/// Create and setup a Pin
pub fn new(port: Port, pin_index: u8, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) -> Pin {
let pin = Pin {
port: port,
pin: pin_index,
};
pin.setup_regs(function, gpiodir);
pin
}
fn setup_regs(&self, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) {
let (offset, reg) = self.get_pinsel_reg_and_offset();
let fun_bits: u32 = (function as u32) << ((offset as uint) * 2);
let mask_bits: u32 =!(3u32 << ((offset as uint) * 2));
let val: u32 = reg.value();
let new_val = (val & mask_bits) | fun_bits;
reg.set_value(new_val);
if function == Function::Gpio {
(self as &::hal::pin::Gpio).set_direction(gpiodir.unwrap());
}
}
fn gpioreg(&self) -> &reg::Gpio {
match self.port {
Port0 => &reg::GPIO_0,
Port1 => &reg::GPIO_1,
Port2 => &reg::GPIO_2,
Port3 => &reg::GPIO_3,
Port4 => &reg::GPIO_4,
}
}
fn get_pinsel_reg_and_offset(&self) -> (u8, &reg::PINSEL) {
match self.port {
Port0 => match self.pin {
0...15 => (self.pin, &reg::PINSEL0),
16...30 => (self.pin-16, &reg::PINSEL1),
_ => unsafe { abort() },
},
Port1 => match self.pin {
0...15 => (self.pin, &reg::PINSEL2),
16...31 => (self.pin-16, &reg::PINSEL3),
_ => unsafe { abort() },
},
Port2 => match self.pin {
0...13 => (self.pin, &reg::PINSEL4),
_ => unsafe { abort() },
},
Port3 => match self.pin {
25|26 => (self.pin-16, &reg::PINSEL7),
_ => unsafe { abort() },
},
Port4 => match self.pin {
28|29 => (self.pin-16, &reg::PINSEL9),
_ => unsafe { abort() },
},
}
}
}
impl ::hal::pin::Gpio for Pin {
/// Sets output GPIO value to high.
fn set_high(&self) {
self.gpioreg().set_FIOSET(1 << (self.pin as uint));
}
/// Sets output GPIO value to low.
fn set_low(&self) {
self.gpioreg().set_FIOCLR(1 << (self.pin as uint));
}
/// Returns input GPIO level.
fn level(&self) -> ::hal::pin::GpioLevel {
let bit: u32 = 1 << (self.pin as uint);
let reg = self.gpioreg();
match reg.FIOPIN() & bit {
0 => ::hal::pin::Low,
_ => ::hal::pin::High,
}
}
/// Sets output GPIO direction.
fn set_direction(&self, new_mode: ::hal::pin::GpioDirection) {
let bit: u32 = 1 << (self.pin as uint);
let mask: u32 =!bit;
let reg = self.gpioreg();
let val: u32 = reg.FIODIR();
let new_val: u32 = match new_mode {
::hal::pin::In => val & mask,
::hal::pin::Out => (val & mask) | bit,
};
reg.set_FIODIR(new_val);
}
}
/// Sets the state of trace port interface.
pub fn set_trace_port_interface_enabled(enabled: bool) {
let value: u32 = if enabled { 0b1000 } else { 0 };
reg::PINSEL10.set_value(value);
}
mod reg {
use util::volatile_cell::VolatileCell;
ioreg_old!(PINSEL: u32, value);
reg_rw!(PINSEL, u32, value, set_value, value);
extern {
#[link_name="lpc17xx_iomem_PINSEL0"] pub static PINSEL0: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL1"] pub static PINSEL1: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL2"] pub static PINSEL2: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL3"] pub static PINSEL3: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL4"] pub static PINSEL4: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL7"] pub static PINSEL7: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL9"] pub static PINSEL9: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL10"] pub static PINSEL10: PINSEL;
}
ioreg_old!(Gpio: u32, FIODIR, _r0, _r1, _r2, FIOMASK, FIOPIN, FIOSET, FIOCLR);
reg_rw!(Gpio, u32, FIODIR, set_FIODIR, FIODIR);
reg_rw!(Gpio, u32, FIOMASK, set_FIOMASK, FIOMASK);
reg_rw!(Gpio, u32, FIOPIN, set_FIOPIN, FIOPIN);
reg_rw!(Gpio, u32, FIOSET, set_FIOSET, FIOSET);
reg_rw!(Gpio, u32, FIOCLR, set_FIOCLR, FIOCLR);
extern {
#[link_name="lpc17xx_iomem_GPIO0"] pub static GPIO_0: Gpio;
#[link_name="lpc17xx_iomem_GPIO1"] pub static GPIO_1: Gpio;
#[link_name="lpc17xx_iomem_GPIO2"] pub static GPIO_2: Gpio;
#[link_name="lpc17xx_iomem_GPIO3"] pub static GPIO_3: Gpio;
#[link_name="lpc17xx_iomem_GPIO4"] pub static GPIO_4: Gpio;
}
}
|
Port4,
}
|
random_line_split
|
pin.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*!
Pin configuration.
Some pins that could be configured here may be missing from actual MCU depending
on the package.
*/
use core::intrinsics::abort;
use core::option::Option;
use core::marker::Copy;
use self::Port::*;
#[path="../../util/ioreg.rs"]
#[macro_use] mod ioreg;
/// Available port names.
#[allow(missing_docs)]
#[derive(Copy)]
pub enum Port {
Port0,
Port1,
Port2,
Port3,
Port4,
}
/// Pin functions (GPIO or up to three additional functions).
#[derive(PartialEq)]
#[allow(missing_docs)]
pub enum Function {
Gpio = 0,
AltFunction1 = 1,
AltFunction2 = 2,
AltFunction3 = 3,
}
impl Copy for Function {}
/// Structure to describe the location of a pin
#[derive(Copy)]
pub struct Pin {
/// Port the pin is attached to
port: Port,
/// Pin number in the port
pin: u8
}
impl Pin {
/// Create and setup a Pin
pub fn new(port: Port, pin_index: u8, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) -> Pin {
let pin = Pin {
port: port,
pin: pin_index,
};
pin.setup_regs(function, gpiodir);
pin
}
fn setup_regs(&self, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) {
let (offset, reg) = self.get_pinsel_reg_and_offset();
let fun_bits: u32 = (function as u32) << ((offset as uint) * 2);
let mask_bits: u32 =!(3u32 << ((offset as uint) * 2));
let val: u32 = reg.value();
let new_val = (val & mask_bits) | fun_bits;
reg.set_value(new_val);
if function == Function::Gpio {
(self as &::hal::pin::Gpio).set_direction(gpiodir.unwrap());
}
}
fn gpioreg(&self) -> &reg::Gpio
|
fn get_pinsel_reg_and_offset(&self) -> (u8, &reg::PINSEL) {
match self.port {
Port0 => match self.pin {
0...15 => (self.pin, &reg::PINSEL0),
16...30 => (self.pin-16, &reg::PINSEL1),
_ => unsafe { abort() },
},
Port1 => match self.pin {
0...15 => (self.pin, &reg::PINSEL2),
16...31 => (self.pin-16, &reg::PINSEL3),
_ => unsafe { abort() },
},
Port2 => match self.pin {
0...13 => (self.pin, &reg::PINSEL4),
_ => unsafe { abort() },
},
Port3 => match self.pin {
25|26 => (self.pin-16, &reg::PINSEL7),
_ => unsafe { abort() },
},
Port4 => match self.pin {
28|29 => (self.pin-16, &reg::PINSEL9),
_ => unsafe { abort() },
},
}
}
}
impl ::hal::pin::Gpio for Pin {
/// Sets output GPIO value to high.
fn set_high(&self) {
self.gpioreg().set_FIOSET(1 << (self.pin as uint));
}
/// Sets output GPIO value to low.
fn set_low(&self) {
self.gpioreg().set_FIOCLR(1 << (self.pin as uint));
}
/// Returns input GPIO level.
fn level(&self) -> ::hal::pin::GpioLevel {
let bit: u32 = 1 << (self.pin as uint);
let reg = self.gpioreg();
match reg.FIOPIN() & bit {
0 => ::hal::pin::Low,
_ => ::hal::pin::High,
}
}
/// Sets output GPIO direction.
fn set_direction(&self, new_mode: ::hal::pin::GpioDirection) {
let bit: u32 = 1 << (self.pin as uint);
let mask: u32 =!bit;
let reg = self.gpioreg();
let val: u32 = reg.FIODIR();
let new_val: u32 = match new_mode {
::hal::pin::In => val & mask,
::hal::pin::Out => (val & mask) | bit,
};
reg.set_FIODIR(new_val);
}
}
/// Sets the state of trace port interface.
pub fn set_trace_port_interface_enabled(enabled: bool) {
let value: u32 = if enabled { 0b1000 } else { 0 };
reg::PINSEL10.set_value(value);
}
mod reg {
use util::volatile_cell::VolatileCell;
ioreg_old!(PINSEL: u32, value);
reg_rw!(PINSEL, u32, value, set_value, value);
extern {
#[link_name="lpc17xx_iomem_PINSEL0"] pub static PINSEL0: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL1"] pub static PINSEL1: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL2"] pub static PINSEL2: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL3"] pub static PINSEL3: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL4"] pub static PINSEL4: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL7"] pub static PINSEL7: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL9"] pub static PINSEL9: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL10"] pub static PINSEL10: PINSEL;
}
ioreg_old!(Gpio: u32, FIODIR, _r0, _r1, _r2, FIOMASK, FIOPIN, FIOSET, FIOCLR);
reg_rw!(Gpio, u32, FIODIR, set_FIODIR, FIODIR);
reg_rw!(Gpio, u32, FIOMASK, set_FIOMASK, FIOMASK);
reg_rw!(Gpio, u32, FIOPIN, set_FIOPIN, FIOPIN);
reg_rw!(Gpio, u32, FIOSET, set_FIOSET, FIOSET);
reg_rw!(Gpio, u32, FIOCLR, set_FIOCLR, FIOCLR);
extern {
#[link_name="lpc17xx_iomem_GPIO0"] pub static GPIO_0: Gpio;
#[link_name="lpc17xx_iomem_GPIO1"] pub static GPIO_1: Gpio;
#[link_name="lpc17xx_iomem_GPIO2"] pub static GPIO_2: Gpio;
#[link_name="lpc17xx_iomem_GPIO3"] pub static GPIO_3: Gpio;
#[link_name="lpc17xx_iomem_GPIO4"] pub static GPIO_4: Gpio;
}
}
|
{
match self.port {
Port0 => &reg::GPIO_0,
Port1 => &reg::GPIO_1,
Port2 => &reg::GPIO_2,
Port3 => &reg::GPIO_3,
Port4 => &reg::GPIO_4,
}
}
|
identifier_body
|
pin.rs
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*!
Pin configuration.
Some pins that could be configured here may be missing from actual MCU depending
on the package.
*/
use core::intrinsics::abort;
use core::option::Option;
use core::marker::Copy;
use self::Port::*;
#[path="../../util/ioreg.rs"]
#[macro_use] mod ioreg;
/// Available port names.
#[allow(missing_docs)]
#[derive(Copy)]
pub enum Port {
Port0,
Port1,
Port2,
Port3,
Port4,
}
/// Pin functions (GPIO or up to three additional functions).
#[derive(PartialEq)]
#[allow(missing_docs)]
pub enum Function {
Gpio = 0,
AltFunction1 = 1,
AltFunction2 = 2,
AltFunction3 = 3,
}
impl Copy for Function {}
/// Structure to describe the location of a pin
#[derive(Copy)]
pub struct
|
{
/// Port the pin is attached to
port: Port,
/// Pin number in the port
pin: u8
}
impl Pin {
/// Create and setup a Pin
pub fn new(port: Port, pin_index: u8, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) -> Pin {
let pin = Pin {
port: port,
pin: pin_index,
};
pin.setup_regs(function, gpiodir);
pin
}
fn setup_regs(&self, function: Function,
gpiodir: Option<::hal::pin::GpioDirection>) {
let (offset, reg) = self.get_pinsel_reg_and_offset();
let fun_bits: u32 = (function as u32) << ((offset as uint) * 2);
let mask_bits: u32 =!(3u32 << ((offset as uint) * 2));
let val: u32 = reg.value();
let new_val = (val & mask_bits) | fun_bits;
reg.set_value(new_val);
if function == Function::Gpio {
(self as &::hal::pin::Gpio).set_direction(gpiodir.unwrap());
}
}
fn gpioreg(&self) -> &reg::Gpio {
match self.port {
Port0 => &reg::GPIO_0,
Port1 => &reg::GPIO_1,
Port2 => &reg::GPIO_2,
Port3 => &reg::GPIO_3,
Port4 => &reg::GPIO_4,
}
}
fn get_pinsel_reg_and_offset(&self) -> (u8, &reg::PINSEL) {
match self.port {
Port0 => match self.pin {
0...15 => (self.pin, &reg::PINSEL0),
16...30 => (self.pin-16, &reg::PINSEL1),
_ => unsafe { abort() },
},
Port1 => match self.pin {
0...15 => (self.pin, &reg::PINSEL2),
16...31 => (self.pin-16, &reg::PINSEL3),
_ => unsafe { abort() },
},
Port2 => match self.pin {
0...13 => (self.pin, &reg::PINSEL4),
_ => unsafe { abort() },
},
Port3 => match self.pin {
25|26 => (self.pin-16, &reg::PINSEL7),
_ => unsafe { abort() },
},
Port4 => match self.pin {
28|29 => (self.pin-16, &reg::PINSEL9),
_ => unsafe { abort() },
},
}
}
}
impl ::hal::pin::Gpio for Pin {
/// Sets output GPIO value to high.
fn set_high(&self) {
self.gpioreg().set_FIOSET(1 << (self.pin as uint));
}
/// Sets output GPIO value to low.
fn set_low(&self) {
self.gpioreg().set_FIOCLR(1 << (self.pin as uint));
}
/// Returns input GPIO level.
fn level(&self) -> ::hal::pin::GpioLevel {
let bit: u32 = 1 << (self.pin as uint);
let reg = self.gpioreg();
match reg.FIOPIN() & bit {
0 => ::hal::pin::Low,
_ => ::hal::pin::High,
}
}
/// Sets output GPIO direction.
fn set_direction(&self, new_mode: ::hal::pin::GpioDirection) {
let bit: u32 = 1 << (self.pin as uint);
let mask: u32 =!bit;
let reg = self.gpioreg();
let val: u32 = reg.FIODIR();
let new_val: u32 = match new_mode {
::hal::pin::In => val & mask,
::hal::pin::Out => (val & mask) | bit,
};
reg.set_FIODIR(new_val);
}
}
/// Sets the state of trace port interface.
pub fn set_trace_port_interface_enabled(enabled: bool) {
let value: u32 = if enabled { 0b1000 } else { 0 };
reg::PINSEL10.set_value(value);
}
mod reg {
use util::volatile_cell::VolatileCell;
ioreg_old!(PINSEL: u32, value);
reg_rw!(PINSEL, u32, value, set_value, value);
extern {
#[link_name="lpc17xx_iomem_PINSEL0"] pub static PINSEL0: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL1"] pub static PINSEL1: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL2"] pub static PINSEL2: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL3"] pub static PINSEL3: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL4"] pub static PINSEL4: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL7"] pub static PINSEL7: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL9"] pub static PINSEL9: PINSEL;
#[link_name="lpc17xx_iomem_PINSEL10"] pub static PINSEL10: PINSEL;
}
ioreg_old!(Gpio: u32, FIODIR, _r0, _r1, _r2, FIOMASK, FIOPIN, FIOSET, FIOCLR);
reg_rw!(Gpio, u32, FIODIR, set_FIODIR, FIODIR);
reg_rw!(Gpio, u32, FIOMASK, set_FIOMASK, FIOMASK);
reg_rw!(Gpio, u32, FIOPIN, set_FIOPIN, FIOPIN);
reg_rw!(Gpio, u32, FIOSET, set_FIOSET, FIOSET);
reg_rw!(Gpio, u32, FIOCLR, set_FIOCLR, FIOCLR);
extern {
#[link_name="lpc17xx_iomem_GPIO0"] pub static GPIO_0: Gpio;
#[link_name="lpc17xx_iomem_GPIO1"] pub static GPIO_1: Gpio;
#[link_name="lpc17xx_iomem_GPIO2"] pub static GPIO_2: Gpio;
#[link_name="lpc17xx_iomem_GPIO3"] pub static GPIO_3: Gpio;
#[link_name="lpc17xx_iomem_GPIO4"] pub static GPIO_4: Gpio;
}
}
|
Pin
|
identifier_name
|
unboxed-closures-all-traits.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(lang_items, unboxed_closures)]
fn a<F:Fn(int, int) -> int>(f: F) -> int {
f(1, 2)
}
fn b<F:FnMut(int, int) -> int>(mut f: F) -> int {
f(3, 4)
}
|
fn main() {
let z: int = 7;
assert_eq!(a(move |x: int, y| x + y + z), 10);
assert_eq!(b(move |x: int, y| x + y + z), 14);
assert_eq!(c(move |x: int, y| x + y + z), 18);
}
|
fn c<F:FnOnce(int, int) -> int>(f: F) -> int {
f(5, 6)
}
|
random_line_split
|
unboxed-closures-all-traits.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(lang_items, unboxed_closures)]
fn a<F:Fn(int, int) -> int>(f: F) -> int {
f(1, 2)
}
fn
|
<F:FnMut(int, int) -> int>(mut f: F) -> int {
f(3, 4)
}
fn c<F:FnOnce(int, int) -> int>(f: F) -> int {
f(5, 6)
}
fn main() {
let z: int = 7;
assert_eq!(a(move |x: int, y| x + y + z), 10);
assert_eq!(b(move |x: int, y| x + y + z), 14);
assert_eq!(c(move |x: int, y| x + y + z), 18);
}
|
b
|
identifier_name
|
unboxed-closures-all-traits.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(lang_items, unboxed_closures)]
fn a<F:Fn(int, int) -> int>(f: F) -> int {
f(1, 2)
}
fn b<F:FnMut(int, int) -> int>(mut f: F) -> int
|
fn c<F:FnOnce(int, int) -> int>(f: F) -> int {
f(5, 6)
}
fn main() {
let z: int = 7;
assert_eq!(a(move |x: int, y| x + y + z), 10);
assert_eq!(b(move |x: int, y| x + y + z), 14);
assert_eq!(c(move |x: int, y| x + y + z), 18);
}
|
{
f(3, 4)
}
|
identifier_body
|
suggestions.rs
|
use app::App;
// Third Party
#[cfg(feature = "suggestions")]
use strsim;
// Internal
use fmt::Format;
/// Produces a string from a given list of possible values which is similar to
/// the passed in value `v` with a certain confidence.
/// Thus in a list of possible values like ["foo", "bar"], the value "fop" will yield
/// `Some("foo")`, whereas "blark" would yield `None`.
#[cfg(feature = "suggestions")]
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean<'a, T:?Sized, I>(v: &str, possible_values: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
let mut candidate: Option<(f64, &str)> = None;
for pv in possible_values {
let confidence = strsim::jaro_winkler(v, pv.as_ref());
if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence))
{
candidate = Some((confidence, pv.as_ref()));
}
}
match candidate {
None => None,
Some((_, candidate)) => Some(candidate),
}
}
#[cfg(not(feature = "suggestions"))]
pub fn did_you_mean<'a, T:?Sized, I>(_: &str, _: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
None
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn
|
<'z, T, I>(
arg: &str,
args_rest: &'z [&str],
longs: I,
subcommands: &'z [App],
) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
if let Some(candidate) = did_you_mean(arg, longs) {
let suffix = format!(
"\n\tDid you mean {}{}?",
Format::Good("--"),
Format::Good(candidate)
);
return (suffix, Some(candidate));
}
subcommands
.into_iter()
.filter_map(|subcommand| {
let opts = subcommand
.p
.flags
.iter()
.filter_map(|f| f.s.long)
.chain(subcommand.p.opts.iter().filter_map(|o| o.s.long));
let candidate = match did_you_mean(arg, opts) {
Some(candidate) => candidate,
None => return None,
};
let score = match args_rest.iter().position(|x| *x == subcommand.get_name()) {
Some(score) => score,
None => return None,
};
let suffix = format!(
"\n\tDid you mean to put '{}{}' after the subcommand '{}'?",
Format::Good("--"),
Format::Good(candidate),
Format::Good(subcommand.get_name())
);
Some((score, (suffix, Some(candidate))))
})
.min_by_key(|&(score, _)| score)
.map(|(_, suggestion)| suggestion)
.unwrap_or_else(|| (String::new(), None))
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
pub fn did_you_mean_value_suffix<'z, T, I>(arg: &str, values: I) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
match did_you_mean(arg, values) {
Some(candidate) => {
let suffix = format!("\n\tDid you mean '{}'?", Format::Good(candidate));
(suffix, Some(candidate))
}
None => (String::new(), None),
}
}
#[cfg(all(test, features = "suggestions"))]
mod test {
use super::*;
#[test]
fn possible_values_match() {
let p_vals = ["test", "possible", "values"];
assert_eq!(did_you_mean("tst", p_vals.iter()), Some("test"));
}
#[test]
fn possible_values_nomatch() {
let p_vals = ["test", "possible", "values"];
assert!(did_you_mean("hahaahahah", p_vals.iter()).is_none());
}
#[test]
fn suffix_long() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'--test\'?";
assert_eq!(
did_you_mean_flag_suffix("tst", p_vals.iter(), []),
(suffix, Some("test"))
);
}
#[test]
fn suffix_enum() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'test\'?";
assert_eq!(
did_you_mean_value_suffix("tst", p_vals.iter()),
(suffix, Some("test"))
);
}
}
|
did_you_mean_flag_suffix
|
identifier_name
|
suggestions.rs
|
use app::App;
// Third Party
#[cfg(feature = "suggestions")]
use strsim;
// Internal
use fmt::Format;
/// Produces a string from a given list of possible values which is similar to
/// the passed in value `v` with a certain confidence.
/// Thus in a list of possible values like ["foo", "bar"], the value "fop" will yield
/// `Some("foo")`, whereas "blark" would yield `None`.
#[cfg(feature = "suggestions")]
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean<'a, T:?Sized, I>(v: &str, possible_values: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
let mut candidate: Option<(f64, &str)> = None;
for pv in possible_values {
let confidence = strsim::jaro_winkler(v, pv.as_ref());
if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence))
{
candidate = Some((confidence, pv.as_ref()));
}
}
match candidate {
None => None,
Some((_, candidate)) => Some(candidate),
}
}
#[cfg(not(feature = "suggestions"))]
pub fn did_you_mean<'a, T:?Sized, I>(_: &str, _: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
None
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean_flag_suffix<'z, T, I>(
arg: &str,
args_rest: &'z [&str],
longs: I,
subcommands: &'z [App],
) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
if let Some(candidate) = did_you_mean(arg, longs)
|
subcommands
.into_iter()
.filter_map(|subcommand| {
let opts = subcommand
.p
.flags
.iter()
.filter_map(|f| f.s.long)
.chain(subcommand.p.opts.iter().filter_map(|o| o.s.long));
let candidate = match did_you_mean(arg, opts) {
Some(candidate) => candidate,
None => return None,
};
let score = match args_rest.iter().position(|x| *x == subcommand.get_name()) {
Some(score) => score,
None => return None,
};
let suffix = format!(
"\n\tDid you mean to put '{}{}' after the subcommand '{}'?",
Format::Good("--"),
Format::Good(candidate),
Format::Good(subcommand.get_name())
);
Some((score, (suffix, Some(candidate))))
})
.min_by_key(|&(score, _)| score)
.map(|(_, suggestion)| suggestion)
.unwrap_or_else(|| (String::new(), None))
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
pub fn did_you_mean_value_suffix<'z, T, I>(arg: &str, values: I) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
match did_you_mean(arg, values) {
Some(candidate) => {
let suffix = format!("\n\tDid you mean '{}'?", Format::Good(candidate));
(suffix, Some(candidate))
}
None => (String::new(), None),
}
}
#[cfg(all(test, features = "suggestions"))]
mod test {
use super::*;
#[test]
fn possible_values_match() {
let p_vals = ["test", "possible", "values"];
assert_eq!(did_you_mean("tst", p_vals.iter()), Some("test"));
}
#[test]
fn possible_values_nomatch() {
let p_vals = ["test", "possible", "values"];
assert!(did_you_mean("hahaahahah", p_vals.iter()).is_none());
}
#[test]
fn suffix_long() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'--test\'?";
assert_eq!(
did_you_mean_flag_suffix("tst", p_vals.iter(), []),
(suffix, Some("test"))
);
}
#[test]
fn suffix_enum() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'test\'?";
assert_eq!(
did_you_mean_value_suffix("tst", p_vals.iter()),
(suffix, Some("test"))
);
}
}
|
{
let suffix = format!(
"\n\tDid you mean {}{}?",
Format::Good("--"),
Format::Good(candidate)
);
return (suffix, Some(candidate));
}
|
conditional_block
|
suggestions.rs
|
use app::App;
// Third Party
#[cfg(feature = "suggestions")]
use strsim;
// Internal
use fmt::Format;
/// Produces a string from a given list of possible values which is similar to
/// the passed in value `v` with a certain confidence.
/// Thus in a list of possible values like ["foo", "bar"], the value "fop" will yield
/// `Some("foo")`, whereas "blark" would yield `None`.
#[cfg(feature = "suggestions")]
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean<'a, T:?Sized, I>(v: &str, possible_values: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
let mut candidate: Option<(f64, &str)> = None;
for pv in possible_values {
let confidence = strsim::jaro_winkler(v, pv.as_ref());
if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence))
{
candidate = Some((confidence, pv.as_ref()));
}
}
match candidate {
None => None,
Some((_, candidate)) => Some(candidate),
}
}
#[cfg(not(feature = "suggestions"))]
pub fn did_you_mean<'a, T:?Sized, I>(_: &str, _: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
|
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean_flag_suffix<'z, T, I>(
arg: &str,
args_rest: &'z [&str],
longs: I,
subcommands: &'z [App],
) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
if let Some(candidate) = did_you_mean(arg, longs) {
let suffix = format!(
"\n\tDid you mean {}{}?",
Format::Good("--"),
Format::Good(candidate)
);
return (suffix, Some(candidate));
}
subcommands
.into_iter()
.filter_map(|subcommand| {
let opts = subcommand
.p
.flags
.iter()
.filter_map(|f| f.s.long)
.chain(subcommand.p.opts.iter().filter_map(|o| o.s.long));
let candidate = match did_you_mean(arg, opts) {
Some(candidate) => candidate,
None => return None,
};
let score = match args_rest.iter().position(|x| *x == subcommand.get_name()) {
Some(score) => score,
None => return None,
};
let suffix = format!(
"\n\tDid you mean to put '{}{}' after the subcommand '{}'?",
Format::Good("--"),
Format::Good(candidate),
Format::Good(subcommand.get_name())
);
Some((score, (suffix, Some(candidate))))
})
.min_by_key(|&(score, _)| score)
.map(|(_, suggestion)| suggestion)
.unwrap_or_else(|| (String::new(), None))
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
pub fn did_you_mean_value_suffix<'z, T, I>(arg: &str, values: I) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
match did_you_mean(arg, values) {
Some(candidate) => {
let suffix = format!("\n\tDid you mean '{}'?", Format::Good(candidate));
(suffix, Some(candidate))
}
None => (String::new(), None),
}
}
#[cfg(all(test, features = "suggestions"))]
mod test {
use super::*;
#[test]
fn possible_values_match() {
let p_vals = ["test", "possible", "values"];
assert_eq!(did_you_mean("tst", p_vals.iter()), Some("test"));
}
#[test]
fn possible_values_nomatch() {
let p_vals = ["test", "possible", "values"];
assert!(did_you_mean("hahaahahah", p_vals.iter()).is_none());
}
#[test]
fn suffix_long() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'--test\'?";
assert_eq!(
did_you_mean_flag_suffix("tst", p_vals.iter(), []),
(suffix, Some("test"))
);
}
#[test]
fn suffix_enum() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'test\'?";
assert_eq!(
did_you_mean_value_suffix("tst", p_vals.iter()),
(suffix, Some("test"))
);
}
}
|
{
None
}
|
identifier_body
|
suggestions.rs
|
use app::App;
// Third Party
#[cfg(feature = "suggestions")]
use strsim;
// Internal
use fmt::Format;
/// Produces a string from a given list of possible values which is similar to
/// the passed in value `v` with a certain confidence.
/// Thus in a list of possible values like ["foo", "bar"], the value "fop" will yield
/// `Some("foo")`, whereas "blark" would yield `None`.
#[cfg(feature = "suggestions")]
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean<'a, T:?Sized, I>(v: &str, possible_values: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>,
{
let mut candidate: Option<(f64, &str)> = None;
for pv in possible_values {
let confidence = strsim::jaro_winkler(v, pv.as_ref());
if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence))
{
candidate = Some((confidence, pv.as_ref()));
}
}
match candidate {
None => None,
Some((_, candidate)) => Some(candidate),
}
}
#[cfg(not(feature = "suggestions"))]
|
I: IntoIterator<Item = &'a T>,
{
None
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
#[cfg_attr(feature = "lints", allow(needless_lifetimes))]
pub fn did_you_mean_flag_suffix<'z, T, I>(
arg: &str,
args_rest: &'z [&str],
longs: I,
subcommands: &'z [App],
) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
if let Some(candidate) = did_you_mean(arg, longs) {
let suffix = format!(
"\n\tDid you mean {}{}?",
Format::Good("--"),
Format::Good(candidate)
);
return (suffix, Some(candidate));
}
subcommands
.into_iter()
.filter_map(|subcommand| {
let opts = subcommand
.p
.flags
.iter()
.filter_map(|f| f.s.long)
.chain(subcommand.p.opts.iter().filter_map(|o| o.s.long));
let candidate = match did_you_mean(arg, opts) {
Some(candidate) => candidate,
None => return None,
};
let score = match args_rest.iter().position(|x| *x == subcommand.get_name()) {
Some(score) => score,
None => return None,
};
let suffix = format!(
"\n\tDid you mean to put '{}{}' after the subcommand '{}'?",
Format::Good("--"),
Format::Good(candidate),
Format::Good(subcommand.get_name())
);
Some((score, (suffix, Some(candidate))))
})
.min_by_key(|&(score, _)| score)
.map(|(_, suggestion)| suggestion)
.unwrap_or_else(|| (String::new(), None))
}
/// Returns a suffix that can be empty, or is the standard 'did you mean' phrase
pub fn did_you_mean_value_suffix<'z, T, I>(arg: &str, values: I) -> (String, Option<&'z str>)
where
T: AsRef<str> + 'z,
I: IntoIterator<Item = &'z T>,
{
match did_you_mean(arg, values) {
Some(candidate) => {
let suffix = format!("\n\tDid you mean '{}'?", Format::Good(candidate));
(suffix, Some(candidate))
}
None => (String::new(), None),
}
}
#[cfg(all(test, features = "suggestions"))]
mod test {
use super::*;
#[test]
fn possible_values_match() {
let p_vals = ["test", "possible", "values"];
assert_eq!(did_you_mean("tst", p_vals.iter()), Some("test"));
}
#[test]
fn possible_values_nomatch() {
let p_vals = ["test", "possible", "values"];
assert!(did_you_mean("hahaahahah", p_vals.iter()).is_none());
}
#[test]
fn suffix_long() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'--test\'?";
assert_eq!(
did_you_mean_flag_suffix("tst", p_vals.iter(), []),
(suffix, Some("test"))
);
}
#[test]
fn suffix_enum() {
let p_vals = ["test", "possible", "values"];
let suffix = "\n\tDid you mean \'test\'?";
assert_eq!(
did_you_mean_value_suffix("tst", p_vals.iter()),
(suffix, Some("test"))
);
}
}
|
pub fn did_you_mean<'a, T: ?Sized, I>(_: &str, _: I) -> Option<&'a str>
where
T: AsRef<str> + 'a,
|
random_line_split
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use util::str::{DOMString, LengthOrPercentageOrAuto, parse_unsigned_integer, parse_legacy_color, parse_length};
use util::str::{split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
|
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn from_legacy_color(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
AttrValue::UInt(string, result)
}
|
random_line_split
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use util::str::{DOMString, LengthOrPercentageOrAuto, parse_unsigned_integer, parse_legacy_color, parse_length};
use util::str::{split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue {
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn
|
(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
from_legacy_color
|
identifier_name
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use std::ops::Deref;
use string_cache::{Atom, Namespace};
use util::str::{DOMString, LengthOrPercentageOrAuto, parse_unsigned_integer, parse_legacy_color, parse_length};
use util::str::{split_html_space_chars, str_join};
use values::specified::{Length};
// Duplicated from script::dom::values.
const UNSIGNED_LONG_MAX: u32 = 2147483647;
#[derive(PartialEq, Clone, HeapSizeOf)]
pub enum AttrValue {
String(DOMString),
TokenList(DOMString, Vec<Atom>),
UInt(DOMString, u32),
Atom(Atom),
Length(DOMString, Option<Length>),
Color(DOMString, Option<RGBA>),
Dimension(DOMString, LengthOrPercentageOrAuto),
}
impl AttrValue {
pub fn from_serialized_tokenlist(tokens: DOMString) -> AttrValue {
let atoms =
split_html_space_chars(&tokens)
.map(Atom::from)
.fold(vec![], |mut acc, atom| {
if!acc.contains(&atom) { acc.push(atom) }
acc
});
AttrValue::TokenList(tokens, atoms)
}
pub fn from_atomic_tokens(atoms: Vec<Atom>) -> AttrValue
|
// https://html.spec.whatwg.org/multipage/#reflecting-content-attributes-in-idl-attributes:idl-unsigned-long
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
// https://html.spec.whatwg.org/multipage/#limited-to-only-non-negative-numbers-greater-than-zero
pub fn from_limited_u32(string: DOMString, default: u32) -> AttrValue {
let result = parse_unsigned_integer(string.chars()).unwrap_or(default);
let result = if result == 0 || result > UNSIGNED_LONG_MAX {
default
} else {
result
};
AttrValue::UInt(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
// FIXME(ajeffrey): convert directly from DOMString to Atom
let value = Atom::from(&*string);
AttrValue::Atom(value)
}
pub fn from_legacy_color(string: DOMString) -> AttrValue {
let parsed = parse_legacy_color(&string).ok();
AttrValue::Color(string, parsed)
}
pub fn from_dimension(string: DOMString) -> AttrValue {
let parsed = parse_length(&string);
AttrValue::Dimension(string, parsed)
}
/// Assumes the `AttrValue` is a `TokenList` and returns its tokens
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `TokenList`
pub fn as_tokens(&self) -> &[Atom] {
match *self {
AttrValue::TokenList(_, ref tokens) => tokens,
_ => panic!("Tokens not found"),
}
}
/// Assumes the `AttrValue` is an `Atom` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not an `Atom`
pub fn as_atom(&self) -> &Atom {
match *self {
AttrValue::Atom(ref value) => value,
_ => panic!("Atom not found"),
}
}
/// Assumes the `AttrValue` is a `Color` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Color`
pub fn as_color(&self) -> Option<&RGBA> {
match *self {
AttrValue::Color(_, ref color) => color.as_ref(),
_ => panic!("Color not found"),
}
}
/// Assumes the `AttrValue` is a `Length` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Length`
pub fn as_length(&self) -> Option<&Length> {
match *self {
AttrValue::Length(_, ref length) => length.as_ref(),
_ => panic!("Length not found"),
}
}
/// Assumes the `AttrValue` is a `Dimension` and returns its value
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `Dimension`
pub fn as_dimension(&self) -> &LengthOrPercentageOrAuto {
match *self {
AttrValue::Dimension(_, ref l) => l,
_ => panic!("Dimension not found"),
}
}
/// Return the AttrValue as its integer representation, if any.
/// This corresponds to attribute values returned as `AttrValue::UInt(_)`
/// by `VirtualMethods::parse_plain_attribute()`.
///
/// ## Panics
///
/// Panics if the `AttrValue` is not a `UInt`
pub fn as_uint(&self) -> u32 {
if let AttrValue::UInt(_, value) = *self {
value
} else {
panic!("Uint not found");
}
}
}
impl Deref for AttrValue {
type Target = str;
fn deref(&self) -> &str {
match *self {
AttrValue::String(ref value) |
AttrValue::TokenList(ref value, _) |
AttrValue::UInt(ref value, _) |
AttrValue::Length(ref value, _) |
AttrValue::Color(ref value, _) |
AttrValue::Dimension(ref value, _) => &value,
AttrValue::Atom(ref value) => &value,
}
}
}
#[derive(Clone, HeapSizeOf, Debug)]
pub struct AttrIdentifier {
pub local_name: Atom,
pub name: Atom,
pub namespace: Namespace,
pub prefix: Option<Atom>,
}
|
{
// TODO(ajeffrey): efficient conversion of Vec<Atom> to DOMString
let tokens = DOMString::from(str_join(&atoms, "\x20"));
AttrValue::TokenList(tokens, atoms)
}
|
identifier_body
|
unsized7.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
trait T {}
// I would like these to fail eventually.
// impl - bounded
trait T1<Z: T> {
}
struct S3<Y:?Sized>;
impl<X:?Sized + T> T1<X> for S3<X> {
//~^ ERROR `core::marker::Sized` is not implemented for the type `X`
}
fn main() { }
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test sized-ness checking in substitution in impls.
|
random_line_split
|
unsized7.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test sized-ness checking in substitution in impls.
trait T {}
// I would like these to fail eventually.
// impl - bounded
trait T1<Z: T> {
}
struct S3<Y:?Sized>;
impl<X:?Sized + T> T1<X> for S3<X> {
//~^ ERROR `core::marker::Sized` is not implemented for the type `X`
}
fn
|
() { }
|
main
|
identifier_name
|