file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
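Each row below is one fill-in-the-middle (FIM) example: the held-out `middle` completes a source file when spliced between `prefix` and `suffix`, and `fim_type` names the class of span that was cut. A minimal sketch of that reconstruction, assuming field names that simply mirror the column headers above:

```rust
/// One row of this FIM dataset; field names mirror the table columns above.
struct FimRow {
    file_name: String,
    prefix: String,   // text before the hole
    suffix: String,   // text after the hole
    middle: String,   // the held-out span (the completion target)
    fim_type: String, // one of the 4 classes, e.g. "random_line_split"
}

impl FimRow {
    /// The ground-truth file is simply prefix + middle + suffix.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```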
cargo_clean.rs
|
use crate::core::InternedString;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use crate::core::compiler::unit_dependencies;
use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
use crate::core::compiler::{RustcTargetData, UnitInterner};
use crate::core::profiles::{Profiles, UnitFor};
|
use crate::core::{PackageIdSpec, Workspace};
use crate::ops;
use crate::ops::resolve::WorkspaceResolve;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::paths;
use crate::util::Config;
pub struct CleanOptions<'a> {
pub config: &'a Config,
/// A list of packages to clean. If empty, everything is cleaned.
pub spec: Vec<String>,
/// The target arch triple to clean, or None for the host arch
pub target: Option<String>,
/// Whether a build profile was explicitly specified on the command line
pub profile_specified: bool,
/// The build profile whose directory should be cleaned
pub requested_profile: InternedString,
/// Whether to just clean the doc directory
pub doc: bool,
}
/// Cleans the package's build artifacts.
pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
let mut target_dir = ws.target_dir();
let config = ws.config();
// If the doc option is set, we just want to delete the doc directory.
if opts.doc {
target_dir = target_dir.join("doc");
return rm_rf(&target_dir.into_path_unlocked(), config);
}
let profiles = Profiles::new(ws.profiles(), config, opts.requested_profile, ws.features())?;
if opts.profile_specified {
// After parsing profiles we know the dir-name of the profile, if a profile
// was passed from the command line. If so, delete only the directory of
// that profile.
let dir_name = profiles.get_dir_name();
target_dir = target_dir.join(dir_name);
}
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
//
// Note that we don't bother grabbing a lock here as we're just going to
// blow it all away anyway.
if opts.spec.is_empty() {
return rm_rf(&target_dir.into_path_unlocked(), config);
}
let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
build_config.requested_profile = opts.requested_profile;
let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
let resolve_opts = ResolveOpts::everything();
let specs = opts
.spec
.iter()
.map(|spec| PackageIdSpec::parse(spec))
.collect::<CargoResult<Vec<_>>>()?;
let ws_resolve = ops::resolve_ws_with_opts(
ws,
&target_data,
build_config.requested_kind,
&resolve_opts,
&specs,
HasDevUnits::Yes,
)?;
let WorkspaceResolve {
pkg_set,
targeted_resolve: resolve,
resolved_features: features,
..
} = ws_resolve;
let interner = UnitInterner::new();
let bcx = BuildContext::new(
ws,
&pkg_set,
opts.config,
&build_config,
profiles,
&interner,
HashMap::new(),
target_data,
)?;
let mut units = Vec::new();
for spec in opts.spec.iter() {
// Translate the spec to a Package
let pkgid = resolve.query(spec)?;
let pkg = pkg_set.get_one(pkgid)?;
// Generate all relevant `Unit` targets for this package
for target in pkg.targets() {
for kind in [CompileKind::Host, build_config.requested_kind].iter() {
for mode in CompileMode::all_modes() {
for unit_for in UnitFor::all_values() {
let profile = if mode.is_run_custom_build() {
bcx.profiles
.get_profile_run_custom_build(&bcx.profiles.get_profile(
pkg.package_id(),
ws.is_member(pkg),
*unit_for,
CompileMode::Build,
))
} else {
bcx.profiles.get_profile(
pkg.package_id(),
ws.is_member(pkg),
*unit_for,
*mode,
)
};
// Use unverified here since this is being more
// exhaustive than what is actually needed.
let features_for = unit_for.map_to_features_for();
let features =
features.activated_features_unverified(pkg.package_id(), features_for);
units.push(bcx.units.intern(
pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
));
}
}
}
}
}
let unit_dependencies =
unit_dependencies::build_unit_dependencies(&bcx, &resolve, &features, None, &units, &[])?;
let mut cx = Context::new(config, &bcx, unit_dependencies, build_config.requested_kind)?;
cx.prepare_units(None, &units)?;
for unit in units.iter() {
if unit.mode.is_doc() || unit.mode.is_doc_test() {
// Cleaning individual rustdoc crates is currently not supported.
// For example, the search index would need to be rebuilt to fully
// remove it (otherwise you're left with lots of broken links).
// Doc tests produce no output.
continue;
}
rm_rf(&cx.files().fingerprint_dir(unit), config)?;
if unit.target.is_custom_build() {
if unit.mode.is_run_custom_build() {
rm_rf(&cx.files().build_script_out_dir(unit), config)?;
} else {
rm_rf(&cx.files().build_script_dir(unit), config)?;
}
continue;
}
for output in cx.outputs(unit)?.iter() {
rm_rf(&output.path, config)?;
if let Some(ref dst) = output.hardlink {
rm_rf(dst, config)?;
}
}
}
Ok(())
}
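/// Removes `path` if it exists: directories recursively, plain files directly,
/// printing a "Removing" status when the shell is verbose.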
fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_dir_all(path)
.chain_err(|| anyhow::format_err!("could not remove build directory"))?;
} else if m.is_ok() {
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_file(path)
.chain_err(|| anyhow::format_err!("failed to remove build artifact"))?;
}
Ok(())
}
|
use crate::core::resolver::features::HasDevUnits;
use crate::core::resolver::ResolveOpts;
|
random_line_split
|
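The `fim_type` label describes how the hole was cut from the file. An illustrative summary, reconstructed from the rows in this table (the `<middle>` markers are placeholders, not dataset syntax):

```rust
// identifier_name:   the hole is an identifier at its definition site,
//                    e.g. `pub fn <middle>() -> Robot { ... }` with middle = "new".
// identifier_body:   the hole is the body of a definition,
//                    e.g. `fn generateName() -> ~str <middle>` with middle = "{ ... }".
// conditional_block: the hole is the block of a conditional,
//                    e.g. `if n == 0 <middle>` with middle = "{ return Ok(pos); }".
// random_line_split: the hole is a run of text cut at random line boundaries.
```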
cargo_clean.rs
|
use crate::core::InternedString;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use crate::core::compiler::unit_dependencies;
use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
use crate::core::compiler::{RustcTargetData, UnitInterner};
use crate::core::profiles::{Profiles, UnitFor};
use crate::core::resolver::features::HasDevUnits;
use crate::core::resolver::ResolveOpts;
use crate::core::{PackageIdSpec, Workspace};
use crate::ops;
use crate::ops::resolve::WorkspaceResolve;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::paths;
use crate::util::Config;
pub struct CleanOptions<'a> {
pub config: &'a Config,
/// A list of packages to clean. If empty, everything is cleaned.
pub spec: Vec<String>,
/// The target arch triple to clean, or None for the host arch
pub target: Option<String>,
/// Whether a build profile was explicitly specified on the command line
pub profile_specified: bool,
/// The build profile whose directory should be cleaned
pub requested_profile: InternedString,
/// Whether to just clean the doc directory
pub doc: bool,
}
/// Cleans the package's build artifacts.
pub fn
|
(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
let mut target_dir = ws.target_dir();
let config = ws.config();
// If the doc option is set, we just want to delete the doc directory.
if opts.doc {
target_dir = target_dir.join("doc");
return rm_rf(&target_dir.into_path_unlocked(), config);
}
let profiles = Profiles::new(ws.profiles(), config, opts.requested_profile, ws.features())?;
if opts.profile_specified {
// After parsing profiles we know the dir-name of the profile, if a profile
// was passed from the command line. If so, delete only the directory of
// that profile.
let dir_name = profiles.get_dir_name();
target_dir = target_dir.join(dir_name);
}
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
//
// Note that we don't bother grabbing a lock here as we're just going to
// blow it all away anyway.
if opts.spec.is_empty() {
return rm_rf(&target_dir.into_path_unlocked(), config);
}
let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
build_config.requested_profile = opts.requested_profile;
let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
let resolve_opts = ResolveOpts::everything();
let specs = opts
.spec
.iter()
.map(|spec| PackageIdSpec::parse(spec))
.collect::<CargoResult<Vec<_>>>()?;
let ws_resolve = ops::resolve_ws_with_opts(
ws,
&target_data,
build_config.requested_kind,
&resolve_opts,
&specs,
HasDevUnits::Yes,
)?;
let WorkspaceResolve {
pkg_set,
targeted_resolve: resolve,
resolved_features: features,
..
} = ws_resolve;
let interner = UnitInterner::new();
let bcx = BuildContext::new(
ws,
&pkg_set,
opts.config,
&build_config,
profiles,
&interner,
HashMap::new(),
target_data,
)?;
let mut units = Vec::new();
for spec in opts.spec.iter() {
// Translate the spec to a Package
let pkgid = resolve.query(spec)?;
let pkg = pkg_set.get_one(pkgid)?;
// Generate all relevant `Unit` targets for this package
for target in pkg.targets() {
for kind in [CompileKind::Host, build_config.requested_kind].iter() {
for mode in CompileMode::all_modes() {
for unit_for in UnitFor::all_values() {
let profile = if mode.is_run_custom_build() {
bcx.profiles
.get_profile_run_custom_build(&bcx.profiles.get_profile(
pkg.package_id(),
ws.is_member(pkg),
*unit_for,
CompileMode::Build,
))
} else {
bcx.profiles.get_profile(
pkg.package_id(),
ws.is_member(pkg),
*unit_for,
*mode,
)
};
// Use unverified here since this is being more
// exhaustive than what is actually needed.
let features_for = unit_for.map_to_features_for();
let features =
features.activated_features_unverified(pkg.package_id(), features_for);
units.push(bcx.units.intern(
pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
));
}
}
}
}
}
let unit_dependencies =
unit_dependencies::build_unit_dependencies(&bcx, &resolve, &features, None, &units, &[])?;
let mut cx = Context::new(config, &bcx, unit_dependencies, build_config.requested_kind)?;
cx.prepare_units(None, &units)?;
for unit in units.iter() {
if unit.mode.is_doc() || unit.mode.is_doc_test() {
// Cleaning individual rustdoc crates is currently not supported.
// For example, the search index would need to be rebuilt to fully
// remove it (otherwise you're left with lots of broken links).
// Doc tests produce no output.
continue;
}
rm_rf(&cx.files().fingerprint_dir(unit), config)?;
if unit.target.is_custom_build() {
if unit.mode.is_run_custom_build() {
rm_rf(&cx.files().build_script_out_dir(unit), config)?;
} else {
rm_rf(&cx.files().build_script_dir(unit), config)?;
}
continue;
}
for output in cx.outputs(unit)?.iter() {
rm_rf(&output.path, config)?;
if let Some(ref dst) = output.hardlink {
rm_rf(dst, config)?;
}
}
}
Ok(())
}
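/// Removes `path` if it exists: directories recursively, plain files directly,
/// printing a "Removing" status when the shell is verbose.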
fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_dir_all(path)
.chain_err(|| anyhow::format_err!("could not remove build directory"))?;
} else if m.is_ok() {
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_file(path)
.chain_err(|| anyhow::format_err!("failed to remove build artifact"))?;
}
Ok(())
}
|
clean
|
identifier_name
|
example.rs
|
use std::rand::{task_rng, Rng};
use std::str;
struct Robot {
name: ~str
}
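// Builds a random robot name: three uppercase letters followed by three digits.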
fn generateName() -> ~str {
let mut rng = task_rng();
let mut s = str::with_capacity(6);
static LETTERS: &'static [u8] = bytes!("ABCDEFGHIJKLMNOPQRSTUVWXYZ");
static NUMBERS: &'static [u8] = bytes!("0123456789");
for _ in range(0, 3) {
s.push_char(rng.choose(LETTERS) as char);
}
for _ in range(0, 3) {
s.push_char(rng.choose(NUMBERS) as char);
}
s
}
impl Robot {
pub fn new() -> Robot {
Robot { name: generateName() }
}
pub fn name<'a>(&'a self) -> &'a str {
|
}
pub fn resetName(&mut self) {
self.name = generateName();
}
}
|
self.name.as_slice()
|
random_line_split
|
example.rs
|
use std::rand::{task_rng, Rng};
use std::str;
struct Robot {
name: ~str
}
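// Builds a random robot name: three uppercase letters followed by three digits.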
fn generateName() -> ~str {
let mut rng = task_rng();
let mut s = str::with_capacity(6);
static LETTERS: &'static [u8] = bytes!("ABCDEFGHIJKLMNOPQRSTUVWXYZ");
static NUMBERS: &'static [u8] = bytes!("0123456789");
for _ in range(0, 3) {
s.push_char(rng.choose(LETTERS) as char);
}
for _ in range(0, 3) {
s.push_char(rng.choose(NUMBERS) as char);
}
s
}
impl Robot {
pub fn
|
() -> Robot {
Robot { name: generateName() }
}
pub fn name<'a>(&'a self) -> &'a str {
self.name.as_slice()
}
pub fn resetName(&mut self) {
self.name = generateName();
}
}
|
new
|
identifier_name
|
example.rs
|
use std::rand::{task_rng, Rng};
use std::str;
struct Robot {
name: ~str
}
fn generateName() -> ~str
|
impl Robot {
pub fn new() -> Robot {
Robot { name: generateName() }
}
pub fn name<'a>(&'a self) -> &'a str {
self.name.as_slice()
}
pub fn resetName(&mut self) {
self.name = generateName();
}
}
|
{
let mut rng = task_rng();
let mut s = str::with_capacity(6);
static LETTERS: &'static [u8] = bytes!("ABCDEFGHIJKLMNOPQRSTUVWXYZ");
static NUMBERS: &'static [u8] = bytes!("0123456789");
for _ in range(0, 3) {
s.push_char(rng.choose(LETTERS) as char);
}
for _ in range(0, 3) {
s.push_char(rng.choose(NUMBERS) as char);
}
s
}
|
identifier_body
|
helper.rs
|
use std::io::Read;
use std::io::Result as IoResult;
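// Renders `number` as the ASCII bytes of its decimal representation.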
pub fn int_to_vec(number: i32) -> Vec<u8> {
let number = number.to_string();
let mut result = Vec::with_capacity(number.len());
for &c in number.as_bytes().iter() {
result.push(c);
}
result
}
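// Reads from `reader` into `buf` until at least `min_bytes` bytes have arrived
// or the reader reaches end-of-file; returns the number of bytes read.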
fn try_read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let mut pos = 0;
let buf_len = buf.len();
while pos < min_bytes {
let buf1 = &mut buf[pos..buf_len];
let n = try!(reader.read(buf1));
pos += n;
if n == 0
|
}
return Ok(pos);
}
fn read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let n = try!(try_read(reader, buf, min_bytes));
if n < min_bytes {
Err(::std::io::Error::new(::std::io::ErrorKind::Other,
"Could not read enough bytes from Reader"))
} else {
Ok(n)
}
}
pub fn read_exact<T: Read>(reader: &mut T, len: usize) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(len);
unsafe { buf.set_len(len); }
try!(read(reader, &mut buf, len));
Ok(buf)
}
|
{ return Ok(pos); }
|
conditional_block
|
helper.rs
|
use std::io::Read;
use std::io::Result as IoResult;
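// Renders `number` as the ASCII bytes of its decimal representation.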
pub fn int_to_vec(number: i32) -> Vec<u8> {
let number = number.to_string();
let mut result = Vec::with_capacity(number.len());
for &c in number.as_bytes().iter() {
result.push(c);
}
result
}
fn try_read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize>
|
fn read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let n = try!(try_read(reader, buf, min_bytes));
if n < min_bytes {
Err(::std::io::Error::new(::std::io::ErrorKind::Other,
"Could not read enough bytes from Reader"))
} else {
Ok(n)
}
}
pub fn read_exact<T: Read>(reader: &mut T, len: usize) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(len);
unsafe { buf.set_len(len); }
try!(read(reader, &mut buf, len));
Ok(buf)
}
|
{
let mut pos = 0;
let buf_len = buf.len();
while pos < min_bytes {
let buf1 = &mut buf[pos..buf_len];
let n = try!(reader.read(buf1));
pos += n;
if n == 0 { return Ok(pos); }
}
return Ok(pos);
}
|
identifier_body
|
helper.rs
|
use std::io::Read;
use std::io::Result as IoResult;
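// Renders `number` as the ASCII bytes of its decimal representation.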
pub fn int_to_vec(number: i32) -> Vec<u8> {
let number = number.to_string();
let mut result = Vec::with_capacity(number.len());
for &c in number.as_bytes().iter() {
result.push(c);
}
result
}
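// Reads from `reader` into `buf` until at least `min_bytes` bytes have arrived
// or the reader reaches end-of-file; returns the number of bytes read.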
fn try_read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let mut pos = 0;
let buf_len = buf.len();
while pos < min_bytes {
let buf1 = &mut buf[pos..buf_len];
let n = try!(reader.read(buf1));
pos += n;
if n == 0 { return Ok(pos); }
}
return Ok(pos);
}
fn read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let n = try!(try_read(reader, buf, min_bytes));
if n < min_bytes {
Err(::std::io::Error::new(::std::io::ErrorKind::Other,
"Could not read enough bytes from Reader"))
} else {
Ok(n)
}
}
pub fn
|
<T: Read>(reader: &mut T, len: usize) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(len);
unsafe { buf.set_len(len); }
try!(read(reader, &mut buf, len));
Ok(buf)
}
|
read_exact
|
identifier_name
|
helper.rs
|
use std::io::Read;
use std::io::Result as IoResult;
pub fn int_to_vec(number: i32) -> Vec<u8> {
let number = number.to_string();
let mut result = Vec::with_capacity(number.len());
for &c in number.as_bytes().iter() {
result.push(c);
}
|
}
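// Reads from `reader` into `buf` until at least `min_bytes` bytes have arrived
// or the reader reaches end-of-file; returns the number of bytes read.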
fn try_read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let mut pos = 0;
let buf_len = buf.len();
while pos < min_bytes {
let buf1 = &mut buf[pos..buf_len];
let n = try!(reader.read(buf1));
pos += n;
if n == 0 { return Ok(pos); }
}
return Ok(pos);
}
fn read<T: Read>(reader: &mut T, buf : &mut [u8], min_bytes : usize) -> IoResult<usize> {
let n = try!(try_read(reader, buf, min_bytes));
if n < min_bytes {
Err(::std::io::Error::new(::std::io::ErrorKind::Other,
"Could not read enough bytes from Reader"))
} else {
Ok(n)
}
}
pub fn read_exact<T: Read>(reader: &mut T, len: usize) -> IoResult<Vec<u8>> {
let mut buf = Vec::with_capacity(len);
unsafe { buf.set_len(len); }
try!(read(reader, &mut buf, len));
Ok(buf)
}
|
result
|
random_line_split
|
showcase.rs
|
/* Copyright 2016 Jordan Miner
*
* Licensed under the MIT license <LICENSE or
* http://opensource.org/licenses/MIT>. This file may not be copied,
* modified, or distributed except according to those terms.
*/
#[macro_use]
extern crate clear_coat;
extern crate iup_sys;
extern crate smallvec;
use std::rc::Rc;
use iup_sys::*;
use smallvec::SmallVec;
use clear_coat::*;
use clear_coat::common_attrs_cbs::*;
struct CursorsCanvas {
canvas: Canvas,
}
impl CursorsCanvas {
pub fn new() -> Self {
CursorsCanvas {
canvas: Canvas::new(),
}
}
}
impl CursorAttribute for CursorsCanvas {}
unsafe impl Control for CursorsCanvas {
fn handle(&self) -> *mut Ihandle {
self.canvas.handle()
}
}
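// Builds the "Cursors" tab: a canvas plus a radio group of toggles that switch
// the canvas's mouse cursor.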
fn create_cursors_page() -> Box<Control> {
let cursors_canvas = Rc::new(CursorsCanvas::new());
cursors_canvas.canvas.set_min_size(300, 200);
let radios_info = [
("None", Cursor::None),
("Arrow", Cursor::Arrow),
("Busy", Cursor::Busy),
("Cross", Cursor::Cross),
("Hand", Cursor::Hand),
("Help", Cursor::Help),
("Move", Cursor::Move),
("ResizeN", Cursor::ResizeN),
("ResizeS", Cursor::ResizeS),
("ResizeNS", Cursor::ResizeNS),
("ResizeW", Cursor::ResizeW),
("ResizeE", Cursor::ResizeE),
("ResizeWE", Cursor::ResizeWE),
("ResizeNE", Cursor::ResizeNE),
("ResizeSW", Cursor::ResizeSW),
("ResizeNW", Cursor::ResizeNW),
("ResizeSE", Cursor::ResizeSE),
("Text", Cursor::Text),
];
let mut radios = SmallVec::<[Toggle; 32]>::new();
for &(text, cur) in radios_info.iter() {
let toggle = Toggle::new();
toggle.set_title(text);
let cursors_canvas2 = cursors_canvas.clone();
toggle.action_event().add(move |checked| {
if checked { cursors_canvas2.set_cursor(cur); }
});
radios.push(toggle);
}
// The arrow cursor is the default.
radios[1].set_on(true);
let grid = grid_box!(
&radios[0],
&radios[1],
&radios[2],
&radios[3],
&radios[4],
&radios[5],
&radios[6],
&radios[7],
&radios[8],
&radios[9],
&radios[10],
&radios[11],
&radios[12],
&radios[13],
&radios[14],
&radios[15],
&radios[16],
&radios[17],
);
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let page = vbox!(
&cursors_canvas,
hbox!(
fill!(),
Radio::with_child(&grid),
fill!(),
),
);
Box::new(page)
}
fn create_file_dialog_page() -> Box<Control> {
let type_check_box = Toggle::new();
type_check_box.set_title("Dialog Type:");
let open_radio = Toggle::new();
open_radio.set_title("Open");
let save_radio = Toggle::new();
save_radio.set_title("Save");
let dir_radio = Toggle::new();
dir_radio.set_title("Directory");
let type_radio = Radio::with_child(&vbox!(
&open_radio,
&save_radio,
&dir_radio,
));
let dir_check_box = Toggle::new();
dir_check_box.set_title("Directory:");
let dir_text_box = Text::new();
let multiple_files_check_box = Toggle::new();
multiple_files_check_box.set_title("Multiple Files");
let show_dialog = Button::with_title("Show Dialog");
let type_check_box_capt = type_check_box.clone();
let open_radio_capt = open_radio.clone();
let save_radio_capt = save_radio.clone();
let dir_check_box_capt = dir_check_box.clone();
let dir_text_box_capt = dir_text_box.clone();
let multiple_files_check_box_capt = multiple_files_check_box.clone();
show_dialog.action_event().add(move || {
let dialog = FileDlg::new();
if type_check_box_capt.is_on() {
dialog.set_dialog_type(if open_radio_capt.is_on() {
FileDialogType::Open
} else if save_radio_capt.is_on() {
FileDialogType::Save
} else {
FileDialogType::Dir
})
}
if dir_check_box_capt.is_on() {
dialog.set_directory(&dir_text_box_capt.value());
}
if multiple_files_check_box_capt.is_on()
|
dialog.popup(ScreenPosition::CenterParent, ScreenPosition::CenterParent)
.expect("couldn't show file dialog");
});
let grid = grid_box!(
type_check_box, type_radio,
dir_check_box, dir_text_box,
multiple_files_check_box, fill!(),
fill!(), show_dialog,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.set_size_col(1);
grid.fit_all_to_children();
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_list_page() -> Box<Control> {
let list = List::new();
list.set_items(&["A", "B", "C"]);
let list_label = Label::new();
let multiple_list = List::new();
multiple_list.set_multiple(true);
multiple_list.set_items(&["D", "E", "F"]);
let multiple_list_label = Label::new();
let dropdown = List::new();
dropdown.set_dropdown(true);
dropdown.set_items(&["Apple", "Grape", "Orange"]);
let dropdown_label = Label::new();
let edit_box = List::new();
edit_box.set_dropdown(true);
edit_box.set_edit_box(true);
edit_box.set_items(&["Cherry", "Peach", "Pumpkin", "Rhubarb"]);
let edit_box_label = Label::new();
let grid = grid_box!(
&list,
&multiple_list,
&list_label,
&multiple_list_label,
&dropdown,
&edit_box,
&dropdown_label,
&edit_box_label,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let list_label_capt = list_label.clone();
let grid_capt = grid.clone();
list.action_event().add(move |args: &ListActionArgs| {
if !args.selected {
return;
}
list_label_capt.set_title(&format!("Selected {}", args.item_index));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let multiple_list_capt = multiple_list.clone();
let grid_capt = grid.clone();
multiple_list.action_event().add(move |_: &ListActionArgs| {
let mut s = String::with_capacity(32);
for (i, index) in multiple_list_capt.value_multiple().into_iter().enumerate() {
if i > 0 {
s += ", ";
}
s += &*index.to_string();
}
multiple_list_label.set_title(&format!("Selected {}", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let dropdown_capt = dropdown.clone();
let grid_capt = grid.clone();
dropdown.action_event().add(move |_: &ListActionArgs| {
use std::borrow::Cow;
let s = dropdown_capt.value_single().map(|i| Cow::Owned(dropdown_capt.item(i))).unwrap_or("No".into());
dropdown_label.set_title(&format!("{} Juice", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let grid_capt = grid.clone();
edit_box.action_event().add(move |args: &ListActionArgs| {
edit_box_label.set_title(&format!("{} Pie", &args.text));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_timer_page() -> Box<Control> {
let label = Label::new();
let start_button = Button::with_title("Start");
let stop_button = Button::with_title("Stop");
let timer = Timer::new();
let page = vbox!(
&label,
hbox!(&start_button, &stop_button),
);
let page_capt = page.clone();
let label_capt = label.clone();
let mut counter = 1;
timer.action_event().add(move || {
label_capt.set_title(&counter.to_string());
page_capt.refresh_children();
counter += 1;
});
timer.set_time(1000);
timer.set_running(true);
let timer_capt = timer.clone();
start_button.action_event().add(move || {
timer_capt.set_running(true);
});
let timer_capt = timer.clone();
stop_button.action_event().add(move || {
timer_capt.set_running(false);
});
Box::new(page)
}
fn main() {
let dialog = Dialog::new();
let tabs = Tabs::new();
tabs.append_tabs(&[
TabInfo::new(&*create_cursors_page()).title("Cursors"),
TabInfo::new(&*create_file_dialog_page()).title("File Dialog"),
TabInfo::new(&*create_list_page()).title("List"),
TabInfo::new(&*create_timer_page()).title("Timer"),
]);
dialog.append(&tabs).expect("failed to build the window");
dialog.set_title("Showcase");
dialog.show_xy(ScreenPosition::Center, ScreenPosition::Center)
.expect("failed to show the window");
main_loop();
}
|
{
dialog.set_multiple_files(true);
}
|
conditional_block
|
showcase.rs
|
/* Copyright 2016 Jordan Miner
*
* Licensed under the MIT license <LICENSE or
* http://opensource.org/licenses/MIT>. This file may not be copied,
* modified, or distributed except according to those terms.
*/
#[macro_use]
extern crate clear_coat;
extern crate iup_sys;
extern crate smallvec;
use std::rc::Rc;
use iup_sys::*;
use smallvec::SmallVec;
use clear_coat::*;
use clear_coat::common_attrs_cbs::*;
struct CursorsCanvas {
canvas: Canvas,
}
impl CursorsCanvas {
pub fn new() -> Self {
CursorsCanvas {
canvas: Canvas::new(),
}
}
}
impl CursorAttribute for CursorsCanvas {}
unsafe impl Control for CursorsCanvas {
fn
|
(&self) -> *mut Ihandle {
self.canvas.handle()
}
}
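// Builds the "Cursors" tab: a canvas plus a radio group of toggles that switch
// the canvas's mouse cursor.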
fn create_cursors_page() -> Box<Control> {
let cursors_canvas = Rc::new(CursorsCanvas::new());
cursors_canvas.canvas.set_min_size(300, 200);
let radios_info = [
("None", Cursor::None),
("Arrow", Cursor::Arrow),
("Busy", Cursor::Busy),
("Cross", Cursor::Cross),
("Hand", Cursor::Hand),
("Help", Cursor::Help),
("Move", Cursor::Move),
("ResizeN", Cursor::ResizeN),
("ResizeS", Cursor::ResizeS),
("ResizeNS", Cursor::ResizeNS),
("ResizeW", Cursor::ResizeW),
("ResizeE", Cursor::ResizeE),
("ResizeWE", Cursor::ResizeWE),
("ResizeNE", Cursor::ResizeNE),
("ResizeSW", Cursor::ResizeSW),
("ResizeNW", Cursor::ResizeNW),
("ResizeSE", Cursor::ResizeSE),
("Text", Cursor::Text),
];
let mut radios = SmallVec::<[Toggle; 32]>::new();
for &(text, cur) in radios_info.iter() {
let toggle = Toggle::new();
toggle.set_title(text);
let cursors_canvas2 = cursors_canvas.clone();
toggle.action_event().add(move |checked| {
if checked { cursors_canvas2.set_cursor(cur); }
});
radios.push(toggle);
}
// The arrow cursor is the default.
radios[1].set_on(true);
let grid = grid_box!(
&radios[0],
&radios[1],
&radios[2],
&radios[3],
&radios[4],
&radios[5],
&radios[6],
&radios[7],
&radios[8],
&radios[9],
&radios[10],
&radios[11],
&radios[12],
&radios[13],
&radios[14],
&radios[15],
&radios[16],
&radios[17],
);
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let page = vbox!(
&cursors_canvas,
hbox!(
fill!(),
Radio::with_child(&grid),
fill!(),
),
);
Box::new(page)
}
fn create_file_dialog_page() -> Box<Control> {
let type_check_box = Toggle::new();
type_check_box.set_title("Dialog Type:");
let open_radio = Toggle::new();
open_radio.set_title("Open");
let save_radio = Toggle::new();
save_radio.set_title("Save");
let dir_radio = Toggle::new();
dir_radio.set_title("Directory");
let type_radio = Radio::with_child(&vbox!(
&open_radio,
&save_radio,
&dir_radio,
));
let dir_check_box = Toggle::new();
dir_check_box.set_title("Directory:");
let dir_text_box = Text::new();
let multiple_files_check_box = Toggle::new();
multiple_files_check_box.set_title("Multiple Files");
let show_dialog = Button::with_title("Show Dialog");
let type_check_box_capt = type_check_box.clone();
let open_radio_capt = open_radio.clone();
let save_radio_capt = save_radio.clone();
let dir_check_box_capt = dir_check_box.clone();
let dir_text_box_capt = dir_text_box.clone();
let multiple_files_check_box_capt = multiple_files_check_box.clone();
show_dialog.action_event().add(move || {
let dialog = FileDlg::new();
if type_check_box_capt.is_on() {
dialog.set_dialog_type(if open_radio_capt.is_on() {
FileDialogType::Open
} else if save_radio_capt.is_on() {
FileDialogType::Save
} else {
FileDialogType::Dir
})
}
if dir_check_box_capt.is_on() {
dialog.set_directory(&dir_text_box_capt.value());
}
if multiple_files_check_box_capt.is_on() {
dialog.set_multiple_files(true);
}
dialog.popup(ScreenPosition::CenterParent, ScreenPosition::CenterParent)
.expect("couldn't show file dialog");
});
let grid = grid_box!(
type_check_box, type_radio,
dir_check_box, dir_text_box,
multiple_files_check_box, fill!(),
fill!(), show_dialog,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.set_size_col(1);
grid.fit_all_to_children();
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_list_page() -> Box<Control> {
let list = List::new();
list.set_items(&["A", "B", "C"]);
let list_label = Label::new();
let multiple_list = List::new();
multiple_list.set_multiple(true);
multiple_list.set_items(&["D", "E", "F"]);
let multiple_list_label = Label::new();
let dropdown = List::new();
dropdown.set_dropdown(true);
dropdown.set_items(&["Apple", "Grape", "Orange"]);
let dropdown_label = Label::new();
let edit_box = List::new();
edit_box.set_dropdown(true);
edit_box.set_edit_box(true);
edit_box.set_items(&["Cherry", "Peach", "Pumpkin", "Rhubarb"]);
let edit_box_label = Label::new();
let grid = grid_box!(
&list,
&multiple_list,
&list_label,
&multiple_list_label,
&dropdown,
&edit_box,
&dropdown_label,
&edit_box_label,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let list_label_capt = list_label.clone();
let grid_capt = grid.clone();
list.action_event().add(move |args: &ListActionArgs| {
if !args.selected {
return;
}
list_label_capt.set_title(&format!("Selected {}", args.item_index));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let multiple_list_capt = multiple_list.clone();
let grid_capt = grid.clone();
multiple_list.action_event().add(move |_: &ListActionArgs| {
let mut s = String::with_capacity(32);
for (i, index) in multiple_list_capt.value_multiple().into_iter().enumerate() {
if i > 0 {
s += ", ";
}
s += &*index.to_string();
}
multiple_list_label.set_title(&format!("Selected {}", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let dropdown_capt = dropdown.clone();
let grid_capt = grid.clone();
dropdown.action_event().add(move |_: &ListActionArgs| {
use std::borrow::Cow;
let s = dropdown_capt.value_single().map(|i| Cow::Owned(dropdown_capt.item(i))).unwrap_or("No".into());
dropdown_label.set_title(&format!("{} Juice", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let grid_capt = grid.clone();
edit_box.action_event().add(move |args: &ListActionArgs| {
edit_box_label.set_title(&format!("{} Pie", &args.text));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_timer_page() -> Box<Control> {
let label = Label::new();
let start_button = Button::with_title("Start");
let stop_button = Button::with_title("Stop");
let timer = Timer::new();
let page = vbox!(
&label,
hbox!(&start_button, &stop_button),
);
let page_capt = page.clone();
let label_capt = label.clone();
let mut counter = 1;
timer.action_event().add(move || {
label_capt.set_title(&counter.to_string());
page_capt.refresh_children();
counter += 1;
});
timer.set_time(1000);
timer.set_running(true);
let timer_capt = timer.clone();
start_button.action_event().add(move || {
timer_capt.set_running(true);
});
let timer_capt = timer.clone();
stop_button.action_event().add(move || {
timer_capt.set_running(false);
});
Box::new(page)
}
fn main() {
let dialog = Dialog::new();
let tabs = Tabs::new();
tabs.append_tabs(&[
TabInfo::new(&*create_cursors_page()).title("Cursors"),
TabInfo::new(&*create_file_dialog_page()).title("File Dialog"),
TabInfo::new(&*create_list_page()).title("List"),
TabInfo::new(&*create_timer_page()).title("Timer"),
]);
dialog.append(&tabs).expect("failed to build the window");
dialog.set_title("Showcase");
dialog.show_xy(ScreenPosition::Center, ScreenPosition::Center)
.expect("failed to show the window");
main_loop();
}
|
handle
|
identifier_name
|
showcase.rs
|
/* Copyright 2016 Jordan Miner
*
* Licensed under the MIT license <LICENSE or
* http://opensource.org/licenses/MIT>. This file may not be copied,
* modified, or distributed except according to those terms.
*/
#[macro_use]
extern crate clear_coat;
extern crate iup_sys;
extern crate smallvec;
use std::rc::Rc;
use iup_sys::*;
use smallvec::SmallVec;
use clear_coat::*;
use clear_coat::common_attrs_cbs::*;
struct CursorsCanvas {
canvas: Canvas,
}
impl CursorsCanvas {
pub fn new() -> Self {
CursorsCanvas {
canvas: Canvas::new(),
}
}
}
impl CursorAttribute for CursorsCanvas {}
unsafe impl Control for CursorsCanvas {
fn handle(&self) -> *mut Ihandle {
self.canvas.handle()
}
}
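// Builds the "Cursors" tab: a canvas plus a radio group of toggles that switch
// the canvas's mouse cursor.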
fn create_cursors_page() -> Box<Control> {
let cursors_canvas = Rc::new(CursorsCanvas::new());
cursors_canvas.canvas.set_min_size(300, 200);
let radios_info = [
("None", Cursor::None),
("Arrow", Cursor::Arrow),
("Busy", Cursor::Busy),
("Cross", Cursor::Cross),
("Hand", Cursor::Hand),
("Help", Cursor::Help),
("Move", Cursor::Move),
("ResizeN", Cursor::ResizeN),
("ResizeS", Cursor::ResizeS),
("ResizeNS", Cursor::ResizeNS),
("ResizeW", Cursor::ResizeW),
("ResizeE", Cursor::ResizeE),
("ResizeWE", Cursor::ResizeWE),
("ResizeNE", Cursor::ResizeNE),
("ResizeSW", Cursor::ResizeSW),
("ResizeNW", Cursor::ResizeNW),
("ResizeSE", Cursor::ResizeSE),
("Text", Cursor::Text),
];
let mut radios = SmallVec::<[Toggle; 32]>::new();
for &(text, cur) in radios_info.iter() {
let toggle = Toggle::new();
toggle.set_title(text);
let cursors_canvas2 = cursors_canvas.clone();
toggle.action_event().add(move |checked| {
if checked { cursors_canvas2.set_cursor(cur); }
});
radios.push(toggle);
}
// The arrow cursor is the default.
radios[1].set_on(true);
let grid = grid_box!(
&radios[0],
&radios[1],
&radios[2],
&radios[3],
&radios[4],
&radios[5],
&radios[6],
&radios[7],
&radios[8],
&radios[9],
&radios[10],
&radios[11],
&radios[12],
&radios[13],
&radios[14],
&radios[15],
&radios[16],
&radios[17],
);
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let page = vbox!(
&cursors_canvas,
hbox!(
fill!(),
Radio::with_child(&grid),
fill!(),
),
);
Box::new(page)
}
fn create_file_dialog_page() -> Box<Control> {
let type_check_box = Toggle::new();
type_check_box.set_title("Dialog Type:");
let open_radio = Toggle::new();
open_radio.set_title("Open");
let save_radio = Toggle::new();
save_radio.set_title("Save");
let dir_radio = Toggle::new();
dir_radio.set_title("Directory");
let type_radio = Radio::with_child(&vbox!(
&open_radio,
&save_radio,
&dir_radio,
));
let dir_check_box = Toggle::new();
dir_check_box.set_title("Directory:");
let dir_text_box = Text::new();
let multiple_files_check_box = Toggle::new();
multiple_files_check_box.set_title("Multiple Files");
let show_dialog = Button::with_title("Show Dialog");
let type_check_box_capt = type_check_box.clone();
let open_radio_capt = open_radio.clone();
let save_radio_capt = save_radio.clone();
let dir_check_box_capt = dir_check_box.clone();
let dir_text_box_capt = dir_text_box.clone();
let multiple_files_check_box_capt = multiple_files_check_box.clone();
show_dialog.action_event().add(move || {
let dialog = FileDlg::new();
if type_check_box_capt.is_on() {
dialog.set_dialog_type(if open_radio_capt.is_on() {
FileDialogType::Open
} else if save_radio_capt.is_on() {
FileDialogType::Save
} else {
FileDialogType::Dir
})
}
if dir_check_box_capt.is_on() {
dialog.set_directory(&dir_text_box_capt.value());
}
if multiple_files_check_box_capt.is_on() {
dialog.set_multiple_files(true);
}
dialog.popup(ScreenPosition::CenterParent, ScreenPosition::CenterParent)
.expect("couldn't show file dialog");
});
let grid = grid_box!(
type_check_box, type_radio,
dir_check_box, dir_text_box,
multiple_files_check_box, fill!(),
fill!(), show_dialog,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.set_size_col(1);
grid.fit_all_to_children();
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_list_page() -> Box<Control> {
let list = List::new();
list.set_items(&["A", "B", "C"]);
let list_label = Label::new();
let multiple_list = List::new();
multiple_list.set_multiple(true);
multiple_list.set_items(&["D", "E", "F"]);
let multiple_list_label = Label::new();
let dropdown = List::new();
dropdown.set_dropdown(true);
dropdown.set_items(&["Apple", "Grape", "Orange"]);
let dropdown_label = Label::new();
let edit_box = List::new();
edit_box.set_dropdown(true);
edit_box.set_edit_box(true);
edit_box.set_items(&["Cherry", "Peach", "Pumpkin", "Rhubarb"]);
let edit_box_label = Label::new();
let grid = grid_box!(
&list,
&multiple_list,
&list_label,
&multiple_list_label,
&dropdown,
&edit_box,
&dropdown_label,
&edit_box_label,
);
grid.set_top_level_margin_and_gap();
grid.set_num_div(NumDiv::Fixed(2));
grid.fit_all_to_children();
let list_label_capt = list_label.clone();
let grid_capt = grid.clone();
list.action_event().add(move |args: &ListActionArgs| {
if !args.selected {
return;
}
list_label_capt.set_title(&format!("Selected {}", args.item_index));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let multiple_list_capt = multiple_list.clone();
let grid_capt = grid.clone();
multiple_list.action_event().add(move |_: &ListActionArgs| {
let mut s = String::with_capacity(32);
for (i, index) in multiple_list_capt.value_multiple().into_iter().enumerate() {
if i > 0 {
s += ", ";
}
s += &*index.to_string();
}
multiple_list_label.set_title(&format!("Selected {}", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let dropdown_capt = dropdown.clone();
let grid_capt = grid.clone();
dropdown.action_event().add(move |_: &ListActionArgs| {
use std::borrow::Cow;
let s = dropdown_capt.value_single().map(|i| Cow::Owned(dropdown_capt.item(i))).unwrap_or("No".into());
dropdown_label.set_title(&format!("{} Juice", &s));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
let grid_capt = grid.clone();
edit_box.action_event().add(move |args: &ListActionArgs| {
edit_box_label.set_title(&format!("{} Pie", &args.text));
grid_capt.refresh_children();
grid_capt.fit_all_to_children();
});
// I don't think the extra vbox should be necessary, but there won't be space around
// the GridBox otherwise.
let wrapper = vbox!(&grid);
wrapper.set_top_level_margin_and_gap();
Box::new(wrapper)
}
fn create_timer_page() -> Box<Control> {
let label = Label::new();
|
let page = vbox!(
&label,
hbox!(&start_button, &stop_button),
);
let page_capt = page.clone();
let label_capt = label.clone();
let mut counter = 1;
timer.action_event().add(move || {
label_capt.set_title(&counter.to_string());
page_capt.refresh_children();
counter += 1;
});
timer.set_time(1000);
timer.set_running(true);
let timer_capt = timer.clone();
start_button.action_event().add(move || {
timer_capt.set_running(true);
});
let timer_capt = timer.clone();
stop_button.action_event().add(move || {
timer_capt.set_running(false);
});
Box::new(page)
}
fn main() {
let dialog = Dialog::new();
let tabs = Tabs::new();
tabs.append_tabs(&[
TabInfo::new(&*create_cursors_page()).title("Cursors"),
TabInfo::new(&*create_file_dialog_page()).title("File Dialog"),
TabInfo::new(&*create_list_page()).title("List"),
TabInfo::new(&*create_timer_page()).title("Timer"),
]);
dialog.append(&tabs).expect("failed to build the window");
dialog.set_title("Showcase");
dialog.show_xy(ScreenPosition::Center, ScreenPosition::Center)
.expect("failed to show the window");
main_loop();
}
|
let start_button = Button::with_title("Start");
let stop_button = Button::with_title("Stop");
let timer = Timer::new();
|
random_line_split
|
lib.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
cli::{Options, INLINE_PRELUDE},
prelude_template_helpers::StratificationHelper,
};
use abigen::Abigen;
use anyhow::anyhow;
use boogie_backend::{boogie_wrapper::BoogieWrapper, bytecode_translator::BoogieTranslator};
use bytecode::{
data_invariant_instrumentation::DataInvariantInstrumentationProcessor,
debug_instrumentation::DebugInstrumenter,
function_target_pipeline::{FunctionTargetPipeline, FunctionTargetsHolder},
global_invariant_instrumentation::GlobalInvariantInstrumentationProcessor,
global_invariant_instrumentation_v2::GlobalInvariantInstrumentationProcessorV2,
read_write_set_analysis::{self, ReadWriteSetProcessor},
spec_instrumentation::SpecInstrumentationProcessor,
};
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
use docgen::Docgen;
use errmapgen::ErrmapGen;
use handlebars::Handlebars;
use itertools::Itertools;
#[allow(unused_imports)]
use log::{debug, info, warn};
use move_lang::find_move_filenames;
use move_model::{code_writer::CodeWriter, emit, emitln, model::GlobalEnv, run_model_builder};
use once_cell::sync::Lazy;
use regex::Regex;
use std::{
collections::{BTreeMap, BTreeSet},
fs,
fs::File,
io::Read,
path::{Path, PathBuf},
time::Instant,
};
pub mod cli;
mod pipelines;
mod prelude_template_helpers;
// =================================================================================================
// Entry Point
/// Content of the default prelude.
const DEFAULT_PRELUDE: &[u8] = include_bytes!("prelude.bpl");
pub fn
|
<W: WriteColor>(
error_writer: &mut W,
options: Options,
) -> anyhow::Result<()> {
let now = Instant::now();
let target_sources = find_move_filenames(&options.move_sources, true)?;
let all_sources = collect_all_sources(
&target_sources,
&find_move_filenames(&options.move_deps, true)?,
options.inv_v2,
)?;
let other_sources = remove_sources(&target_sources, all_sources);
let address = Some(options.account_address.as_ref());
debug!("parsing and checking sources");
let mut env: GlobalEnv = run_model_builder(target_sources, other_sources, address)?;
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with checking errors"));
}
if options.prover.report_warnings && env.has_warnings() {
env.report_warnings(error_writer);
}
// Add the prover options as an extension to the environment, so they can be accessed
// from there.
env.set_extension(options.prover.clone());
// Until this point, the prover and docgen share the same code path. Here we part ways.
if options.run_docgen {
return run_docgen(&env, &options, error_writer, now);
}
// Same for ABI generator.
if options.run_abigen {
return run_abigen(&env, &options, now);
}
// Same for the error map generator
if options.run_errmapgen {
return Ok(run_errmapgen(&env, &options, now));
}
// Same for read/write set analysis
if options.run_read_write_set {
return Ok(run_read_write_set(&env, &options, now));
}
let targets = create_and_process_bytecode(&options, &env);
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with transformation errors"));
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with modifies checking errors"));
}
// Analyze and find out the set of modules/functions to be translated and/or verified.
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with analysis errors"));
}
let writer = CodeWriter::new(env.internal_loc());
add_prelude(&options, &writer)?;
let mut translator = BoogieTranslator::new(&env, &options.backend, &targets, &writer);
translator.translate();
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie generation errors"));
}
let output_existed = std::path::Path::new(&options.output_path).exists();
debug!("writing boogie to `{}`", &options.output_path);
writer.process_result(|result| fs::write(&options.output_path, result))?;
let translator_elapsed = now.elapsed();
if !options.prover.generate_only {
let boogie_file_id =
writer.process_result(|result| env.add_source(&options.output_path, result, false));
let boogie = BoogieWrapper {
env: &env,
targets: &targets,
writer: &writer,
options: &options.backend,
boogie_file_id,
};
boogie.call_boogie_and_verify_output(options.backend.bench_repeat, &options.output_path)?;
let boogie_elapsed = now.elapsed();
if options.backend.bench_repeat <= 1 {
info!(
"{:.3}s translator, {:.3}s solver",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
);
} else {
info!(
"{:.3}s translator, {:.3}s solver (average over {} solver runs)",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
/ (options.backend.bench_repeat as f64),
options.backend.bench_repeat
);
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie verification errors"));
}
}
if !output_existed && !options.backend.keep_artifacts {
std::fs::remove_file(&options.output_path).unwrap_or_default();
}
Ok(())
}
pub fn run_move_prover_errors_to_stderr(options: Options) -> anyhow::Result<()> {
let mut error_writer = StandardStream::stderr(ColorChoice::Auto);
run_move_prover(&mut error_writer, options)
}
fn run_docgen<W: WriteColor>(
env: &GlobalEnv,
options: &Options,
error_writer: &mut W,
now: Instant,
) -> anyhow::Result<()> {
let generator = Docgen::new(env, &options.docgen);
let checking_elapsed = now.elapsed();
info!("generating documentation");
for (file, content) in generator.gen() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
if env.has_errors() {
env.report_errors(error_writer);
Err(anyhow!("exiting with documentation generation errors"))
} else {
Ok(())
}
}
fn run_abigen(env: &GlobalEnv, options: &Options, now: Instant) -> anyhow::Result<()> {
let mut generator = Abigen::new(env, &options.abigen);
let checking_elapsed = now.elapsed();
info!("generating ABI files");
generator.gen();
for (file, content) in generator.into_result() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
Ok(())
}
fn run_errmapgen(env: &GlobalEnv, options: &Options, now: Instant) {
let mut generator = ErrmapGen::new(env, &options.errmapgen);
let checking_elapsed = now.elapsed();
info!("generating error map");
generator.gen();
generator.save_result();
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
}
fn run_read_write_set(env: &GlobalEnv, options: &Options, now: Instant) {
let mut targets = FunctionTargetsHolder::default();
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
let mut pipeline = FunctionTargetPipeline::default();
pipeline.add_processor(ReadWriteSetProcessor::new());
let start = now.elapsed();
info!("generating read/write set");
pipeline.run(env, &mut targets, None);
read_write_set_analysis::get_read_write_set(env, &targets);
println!("generated for {:?}", options.move_sources);
let end = now.elapsed();
info!("{:.3}s analyzing", (end - start).as_secs_f64());
}
/// Adds the prelude to the generated output.
fn add_prelude(options: &Options, writer: &CodeWriter) -> anyhow::Result<()> {
emit!(writer, "\n// ** prelude from {}\n\n", &options.prelude_path);
let content = if options.prelude_path == INLINE_PRELUDE {
debug!("using inline prelude");
String::from_utf8_lossy(DEFAULT_PRELUDE).to_string()
} else {
debug!("using prelude at {}", &options.prelude_path);
fs::read_to_string(&options.prelude_path)?
};
let mut handlebars = Handlebars::new();
handlebars.register_helper(
"stratified",
Box::new(StratificationHelper::new(
options.backend.stratification_depth,
)),
);
let expanded_content = handlebars.render_template(&content, &options)?;
emitln!(writer, &expanded_content);
Ok(())
}
/// Create bytecode and process it.
fn create_and_process_bytecode(options: &Options, env: &GlobalEnv) -> FunctionTargetsHolder {
let mut targets = FunctionTargetsHolder::default();
// Add function targets for all functions in the environment.
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
// Create processing pipeline and run it.
let pipeline = create_bytecode_processing_pipeline(options);
let dump_file = if options.prover.dump_bytecode {
Some(
options
.move_sources
.get(0)
.cloned()
.unwrap_or_else(|| "bytecode".to_string())
.replace(".move", ""),
)
} else {
None
};
pipeline.run(env, &mut targets, dump_file);
targets
}
/// Function to create the transformation pipeline.
fn create_bytecode_processing_pipeline(options: &Options) -> FunctionTargetPipeline {
let mut res = FunctionTargetPipeline::default();
// Add processors in order they are executed.
res.add_processor(DebugInstrumenter::new());
pipelines::pipelines(options)
.into_iter()
.for_each(|processor| res.add_processor(processor));
res.add_processor(SpecInstrumentationProcessor::new());
res.add_processor(DataInvariantInstrumentationProcessor::new());
if options.inv_v2 {
// *** convert to v2 version ***
res.add_processor(GlobalInvariantInstrumentationProcessorV2::new());
} else {
res.add_processor(GlobalInvariantInstrumentationProcessor::new());
}
res
}
/// Remove the target Move files from the list of files.
fn remove_sources(sources: &[String], all_files: Vec<String>) -> Vec<String> {
let canonical_sources = sources
.iter()
.map(|s| canonicalize(s))
.collect::<BTreeSet<_>>();
all_files
.into_iter()
.filter(|d| !canonical_sources.contains(&canonicalize(d)))
.collect_vec()
}
/// Collects all the relevant Move sources among the sources represented by the
/// `input_deps` parameter. The resulting vector includes the target sources,
/// dependencies of target sources, (recursive) friends of targets and dependencies,
/// and dependencies of friends.
fn collect_all_sources(
target_sources: &[String],
input_deps: &[String],
use_inv_v2: bool,
) -> anyhow::Result<Vec<String>> {
let mut all_sources = target_sources.to_vec();
static DEP_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)use\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static NEW_FRIEND_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)friend\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static FRIEND_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?m)pragma\s*friend\s*=\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap()
});
let target_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(target_deps);
let friend_sources = calculate_deps(
&all_sources,
input_deps,
if use_inv_v2 {
&NEW_FRIEND_REGEX
} else {
&FRIEND_REGEX
},
)?;
all_sources.extend(friend_sources);
let friend_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(friend_deps);
Ok(all_sources)
}
/// Calculates transitive dependencies of the given Move sources. This function
/// is also used to calculate transitive friends depending on the regex provided
/// for extracting matches.
fn calculate_deps(
sources: &[String],
input_deps: &[String],
regex: &Regex,
) -> anyhow::Result<Vec<String>> {
let file_map = calculate_file_map(input_deps)?;
let mut deps = vec![];
let mut visited = BTreeSet::new();
for src in sources.iter() {
calculate_deps_recursively(Path::new(src), &file_map, &mut visited, &mut deps, regex)?;
}
// Remove input sources from deps. They can end up here because our dep analysis is an
// over-approximation that, for example, cannot distinguish between references inside
// and outside of comments.
let mut deps = remove_sources(sources, deps);
// Sort deps by simple file name. Sorting is important because different orders,
// caused by platform-dependent ways in which `calculate_deps_recursively` may return
// values, can change the behavior of the SMT solver (butterfly effect). By using the
// simple file name we abstract away from where the sources live in the file system.
// Since Move has no namespaces and file names can be expected to be unique, matching
// module/script names, this should work in most cases.
deps.sort_by(|a, b| {
let fa = PathBuf::from(a);
let fb = PathBuf::from(b);
Ord::cmp(fa.file_name().unwrap(), fb.file_name().unwrap())
});
Ok(deps)
}
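/// Canonicalizes `s` on a best-effort basis, falling back to the input on error.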
fn canonicalize(s: &str) -> String {
match fs::canonicalize(s) {
Ok(p) => p.to_string_lossy().to_string(),
Err(_) => s.to_string(),
}
}
/// Recursively calculate dependencies.
fn calculate_deps_recursively(
path: &Path,
file_map: &BTreeMap<String, PathBuf>,
visited: &mut BTreeSet<String>,
deps: &mut Vec<String>,
regex: &Regex,
) -> anyhow::Result<()> {
if !visited.insert(path.to_string_lossy().to_string()) {
return Ok(());
}
debug!("including `{}`", path.display());
for dep in extract_matches(path, regex)? {
if let Some(dep_path) = file_map.get(&dep) {
let dep_str = dep_path.to_string_lossy().to_string();
if !deps.contains(&dep_str) {
deps.push(dep_str);
calculate_deps_recursively(dep_path.as_path(), file_map, visited, deps, regex)?;
}
}
}
Ok(())
}
/// Calculate a map of module names to files which define those modules.
fn calculate_file_map(deps: &[String]) -> anyhow::Result<BTreeMap<String, PathBuf>> {
static REX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)module\s+(\w+)\s*\{").unwrap());
let mut module_to_file = BTreeMap::new();
for dep in deps {
let dep_path = PathBuf::from(dep);
for module in extract_matches(dep_path.as_path(), &*REX)? {
module_to_file.insert(module, dep_path.clone());
}
}
Ok(module_to_file)
}
/// Extracts matches out of some text file. `rex` must be a regular expression with one anonymous
/// group.
fn extract_matches(path: &Path, rex: &Regex) -> anyhow::Result<Vec<String>> {
let mut content = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut content)?;
let mut at = 0;
let mut res = vec![];
while let Some(cap) = rex.captures(&content[at..]) {
res.push(cap.get(1).unwrap().as_str().to_string());
at += cap.get(0).unwrap().end();
}
Ok(res)
}
|
run_move_prover
|
identifier_name
|
lib.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
cli::{Options, INLINE_PRELUDE},
prelude_template_helpers::StratificationHelper,
};
use abigen::Abigen;
use anyhow::anyhow;
use boogie_backend::{boogie_wrapper::BoogieWrapper, bytecode_translator::BoogieTranslator};
use bytecode::{
data_invariant_instrumentation::DataInvariantInstrumentationProcessor,
debug_instrumentation::DebugInstrumenter,
function_target_pipeline::{FunctionTargetPipeline, FunctionTargetsHolder},
global_invariant_instrumentation::GlobalInvariantInstrumentationProcessor,
global_invariant_instrumentation_v2::GlobalInvariantInstrumentationProcessorV2,
read_write_set_analysis::{self, ReadWriteSetProcessor},
spec_instrumentation::SpecInstrumentationProcessor,
};
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
use docgen::Docgen;
use errmapgen::ErrmapGen;
use handlebars::Handlebars;
use itertools::Itertools;
#[allow(unused_imports)]
use log::{debug, info, warn};
use move_lang::find_move_filenames;
use move_model::{code_writer::CodeWriter, emit, emitln, model::GlobalEnv, run_model_builder};
use once_cell::sync::Lazy;
use regex::Regex;
use std::{
collections::{BTreeMap, BTreeSet},
fs,
fs::File,
io::Read,
path::{Path, PathBuf},
time::Instant,
};
pub mod cli;
mod pipelines;
mod prelude_template_helpers;
// =================================================================================================
// Entry Point
/// Content of the default prelude.
const DEFAULT_PRELUDE: &[u8] = include_bytes!("prelude.bpl");
pub fn run_move_prover<W: WriteColor>(
error_writer: &mut W,
options: Options,
) -> anyhow::Result<()> {
let now = Instant::now();
let target_sources = find_move_filenames(&options.move_sources, true)?;
let all_sources = collect_all_sources(
&target_sources,
&find_move_filenames(&options.move_deps, true)?,
options.inv_v2,
)?;
let other_sources = remove_sources(&target_sources, all_sources);
let address = Some(options.account_address.as_ref());
debug!("parsing and checking sources");
let mut env: GlobalEnv = run_model_builder(target_sources, other_sources, address)?;
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with checking errors"));
}
if options.prover.report_warnings && env.has_warnings() {
env.report_warnings(error_writer);
}
// Add the prover options as an extension to the environment, so they can be accessed
// from there.
env.set_extension(options.prover.clone());
// Until this point, prover and docgen have the same code. Here we part ways.
if options.run_docgen {
return run_docgen(&env, &options, error_writer, now);
}
// Same for ABI generator.
if options.run_abigen {
return run_abigen(&env, &options, now);
}
// Same for the error map generator
if options.run_errmapgen {
return Ok(run_errmapgen(&env, &options, now));
}
// Same for read/write set analysis
if options.run_read_write_set {
return Ok(run_read_write_set(&env, &options, now));
}
let targets = create_and_process_bytecode(&options, &env);
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with transformation errors"));
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with modifies checking errors"));
}
// Analyze and find out the set of modules/functions to be translated and/or verified.
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with analysis errors"));
}
let writer = CodeWriter::new(env.internal_loc());
add_prelude(&options, &writer)?;
let mut translator = BoogieTranslator::new(&env, &options.backend, &targets, &writer);
translator.translate();
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie generation errors"));
}
let output_existed = std::path::Path::new(&options.output_path).exists();
debug!("writing boogie to `{}`", &options.output_path);
writer.process_result(|result| fs::write(&options.output_path, result))?;
let translator_elapsed = now.elapsed();
if !options.prover.generate_only {
let boogie_file_id =
writer.process_result(|result| env.add_source(&options.output_path, result, false));
let boogie = BoogieWrapper {
env: &env,
targets: &targets,
writer: &writer,
options: &options.backend,
boogie_file_id,
};
boogie.call_boogie_and_verify_output(options.backend.bench_repeat, &options.output_path)?;
let boogie_elapsed = now.elapsed();
if options.backend.bench_repeat <= 1 {
info!(
"{:.3}s translator, {:.3}s solver",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
);
} else {
info!(
"{:.3}s translator, {:.3}s solver (average over {} solver runs)",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
/ (options.backend.bench_repeat as f64),
options.backend.bench_repeat
);
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie verification errors"));
}
}
if !output_existed && !options.backend.keep_artifacts {
std::fs::remove_file(&options.output_path).unwrap_or_default();
}
Ok(())
}
pub fn run_move_prover_errors_to_stderr(options: Options) -> anyhow::Result<()> {
let mut error_writer = StandardStream::stderr(ColorChoice::Auto);
run_move_prover(&mut error_writer, options)
}
fn run_docgen<W: WriteColor>(
env: &GlobalEnv,
options: &Options,
error_writer: &mut W,
now: Instant,
) -> anyhow::Result<()> {
let generator = Docgen::new(env, &options.docgen);
let checking_elapsed = now.elapsed();
info!("generating documentation");
for (file, content) in generator.gen() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
if env.has_errors() {
env.report_errors(error_writer);
Err(anyhow!("exiting with documentation generation errors"))
} else {
Ok(())
}
}
fn run_abigen(env: &GlobalEnv, options: &Options, now: Instant) -> anyhow::Result<()> {
let mut generator = Abigen::new(env, &options.abigen);
let checking_elapsed = now.elapsed();
info!("generating ABI files");
generator.gen();
for (file, content) in generator.into_result() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
Ok(())
}
fn run_errmapgen(env: &GlobalEnv, options: &Options, now: Instant) {
let mut generator = ErrmapGen::new(env, &options.errmapgen);
let checking_elapsed = now.elapsed();
info!("generating error map");
generator.gen();
generator.save_result();
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
}
fn run_read_write_set(env: &GlobalEnv, options: &Options, now: Instant) {
let mut targets = FunctionTargetsHolder::default();
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
let mut pipeline = FunctionTargetPipeline::default();
pipeline.add_processor(ReadWriteSetProcessor::new());
let start = now.elapsed();
info!("generating read/write set");
pipeline.run(env, &mut targets, None);
read_write_set_analysis::get_read_write_set(env, &targets);
println!("generated for {:?}", options.move_sources);
let end = now.elapsed();
info!("{:.3}s analyzing", (end - start).as_secs_f64());
}
/// Adds the prelude to the generated output.
fn add_prelude(options: &Options, writer: &CodeWriter) -> anyhow::Result<()> {
emit!(writer, "\n// ** prelude from {}\n\n", &options.prelude_path);
let content = if options.prelude_path == INLINE_PRELUDE {
debug!("using inline prelude");
String::from_utf8_lossy(DEFAULT_PRELUDE).to_string()
} else {
debug!("using prelude at {}", &options.prelude_path);
fs::read_to_string(&options.prelude_path)?
};
let mut handlebars = Handlebars::new();
handlebars.register_helper(
"stratified",
Box::new(StratificationHelper::new(
options.backend.stratification_depth,
)),
);
let expanded_content = handlebars.render_template(&content, &options)?;
emitln!(writer, &expanded_content);
Ok(())
}
/// Create bytecode and process it.
fn create_and_process_bytecode(options: &Options, env: &GlobalEnv) -> FunctionTargetsHolder {
let mut targets = FunctionTargetsHolder::default();
// Add function targets for all functions in the environment.
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
// Create processing pipeline and run it.
let pipeline = create_bytecode_processing_pipeline(options);
let dump_file = if options.prover.dump_bytecode {
Some(
options
.move_sources
.get(0)
.cloned()
.unwrap_or_else(|| "bytecode".to_string())
.replace(".move", ""),
)
} else {
None
};
pipeline.run(env, &mut targets, dump_file);
targets
}
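// Illustrative dump-file naming (worked out from the code above): with
// `move_sources = ["sources/main.move"]` and bytecode dumping enabled, the stem
// "sources/main" is passed to `pipeline.run`; with no sources it falls back to
// "bytecode". How the stem maps to files on disk is up to `FunctionTargetPipeline`.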
/// Creates the bytecode transformation pipeline.
fn create_bytecode_processing_pipeline(options: &Options) -> FunctionTargetPipeline {
let mut res = FunctionTargetPipeline::default();
// Add processors in the order they are executed.
res.add_processor(DebugInstrumenter::new());
pipelines::pipelines(options)
.into_iter()
.for_each(|processor| res.add_processor(processor));
res.add_processor(SpecInstrumentationProcessor::new());
res.add_processor(DataInvariantInstrumentationProcessor::new());
if options.inv_v2 {
// *** convert to v2 version ***
res.add_processor(GlobalInvariantInstrumentationProcessorV2::new());
} else {
res.add_processor(GlobalInvariantInstrumentationProcessor::new());
}
res
}
/// Remove the target Move files from the list of files.
fn remove_sources(sources: &[String], all_files: Vec<String>) -> Vec<String> {
let canonical_sources = sources
.iter()
.map(|s| canonicalize(s))
.collect::<BTreeSet<_>>();
all_files
.into_iter()
.filter(|d| !canonical_sources.contains(&canonicalize(d)))
.collect_vec()
}
/// Collect all the relevant Move sources among the sources represented by the
/// `input_deps` parameter. The resulting vector of sources includes target sources,
/// dependencies of target sources, (recursive) friends of targets and dependencies,
/// and dependencies of friends.
fn collect_all_sources(
target_sources: &[String],
input_deps: &[String],
use_inv_v2: bool,
) -> anyhow::Result<Vec<String>> {
let mut all_sources = target_sources.to_vec();
static DEP_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)use\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static NEW_FRIEND_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)friend\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static FRIEND_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?m)pragma\s*friend\s*=\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap()
});
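// Illustrative matches (derived from the regexes above, not from the original
// comments): DEP_REGEX captures `Vector` from `use 0x1::Vector;`,
// NEW_FRIEND_REGEX captures `Roles` from `friend 0x1::Roles;`, and
// FRIEND_REGEX captures `Genesis` from `pragma friend = 0x1::Genesis;`.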
let target_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(target_deps);
let friend_sources = calculate_deps(
&all_sources,
input_deps,
if use_inv_v2 {
&NEW_FRIEND_REGEX
} else {
&FRIEND_REGEX
},
)?;
all_sources.extend(friend_sources);
let friend_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(friend_deps);
Ok(all_sources)
}
/// Calculates transitive dependencies of the given Move sources. This function
/// is also used to calculate transitive friends depending on the regex provided
/// for extracting matches.
fn calculate_deps(
sources: &[String],
input_deps: &[String],
regex: &Regex,
) -> anyhow::Result<Vec<String>>
|
Ord::cmp(fa.file_name().unwrap(), fb.file_name().unwrap())
});
Ok(deps)
}
fn canonicalize(s: &str) -> String {
match fs::canonicalize(s) {
Ok(p) => p.to_string_lossy().to_string(),
Err(_) => s.to_string(),
}
}
/// Recursively calculate dependencies.
fn calculate_deps_recursively(
path: &Path,
file_map: &BTreeMap<String, PathBuf>,
visited: &mut BTreeSet<String>,
deps: &mut Vec<String>,
regex: &Regex,
) -> anyhow::Result<()> {
if !visited.insert(path.to_string_lossy().to_string()) {
return Ok(());
}
debug!("including `{}`", path.display());
for dep in extract_matches(path, regex)? {
if let Some(dep_path) = file_map.get(&dep) {
let dep_str = dep_path.to_string_lossy().to_string();
if !deps.contains(&dep_str) {
deps.push(dep_str);
calculate_deps_recursively(dep_path.as_path(), file_map, visited, deps, regex)?;
}
}
}
Ok(())
}
/// Calculate a map of module names to files which define those modules.
fn calculate_file_map(deps: &[String]) -> anyhow::Result<BTreeMap<String, PathBuf>> {
static REX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)module\s+(\w+)\s*\{").unwrap());
let mut module_to_file = BTreeMap::new();
for dep in deps {
let dep_path = PathBuf::from(dep);
for module in extract_matches(dep_path.as_path(), &*REX)? {
module_to_file.insert(module, dep_path.clone());
}
}
Ok(module_to_file)
}
/// Extracts matches from a text file. `rex` must be a regular expression with a
/// single (unnamed) capture group.
fn extract_matches(path: &Path, rex: &Regex) -> anyhow::Result<Vec<String>> {
let mut content = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut content)?;
let mut at = 0;
let mut res = vec![];
while let Some(cap) = rex.captures(&content[at..]) {
res.push(cap.get(1).unwrap().as_str().to_string());
at += cap.get(0).unwrap().end();
}
Ok(res)
}
|
{
let file_map = calculate_file_map(input_deps)?;
let mut deps = vec![];
let mut visited = BTreeSet::new();
for src in sources.iter() {
calculate_deps_recursively(Path::new(src), &file_map, &mut visited, &mut deps, regex)?;
}
// Remove input sources from deps. They can end up here because our dependency
// analysis is an over-approximation and, for example, cannot distinguish between
// references inside and outside of comments.
let mut deps = remove_sources(sources, deps);
// Sort deps by simple file name. Sorting is important because different orders,
// caused by the platform-dependent ways in which `calculate_deps_recursively` may
// return values, can change the behavior of the SMT solver (butterfly effect). By
// using the simple file name we abstract away from where the sources live in the
// file system. Since Move has no namespaces and file names can be expected to match
// unique module/script names, this should work in most cases.
deps.sort_by(|a, b| {
let fa = PathBuf::from(a);
let fb = PathBuf::from(b);
|
identifier_body
|
lib.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
cli::{Options, INLINE_PRELUDE},
prelude_template_helpers::StratificationHelper,
};
use abigen::Abigen;
use anyhow::anyhow;
use boogie_backend::{boogie_wrapper::BoogieWrapper, bytecode_translator::BoogieTranslator};
use bytecode::{
data_invariant_instrumentation::DataInvariantInstrumentationProcessor,
debug_instrumentation::DebugInstrumenter,
function_target_pipeline::{FunctionTargetPipeline, FunctionTargetsHolder},
global_invariant_instrumentation::GlobalInvariantInstrumentationProcessor,
global_invariant_instrumentation_v2::GlobalInvariantInstrumentationProcessorV2,
read_write_set_analysis::{self, ReadWriteSetProcessor},
spec_instrumentation::SpecInstrumentationProcessor,
};
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
use docgen::Docgen;
use errmapgen::ErrmapGen;
use handlebars::Handlebars;
use itertools::Itertools;
#[allow(unused_imports)]
use log::{debug, info, warn};
use move_lang::find_move_filenames;
use move_model::{code_writer::CodeWriter, emit, emitln, model::GlobalEnv, run_model_builder};
use once_cell::sync::Lazy;
use regex::Regex;
use std::{
collections::{BTreeMap, BTreeSet},
fs,
fs::File,
io::Read,
path::{Path, PathBuf},
time::Instant,
};
pub mod cli;
mod pipelines;
mod prelude_template_helpers;
// =================================================================================================
// Entry Point
/// Content of the default prelude.
const DEFAULT_PRELUDE: &[u8] = include_bytes!("prelude.bpl");
pub fn run_move_prover<W: WriteColor>(
error_writer: &mut W,
options: Options,
) -> anyhow::Result<()> {
let now = Instant::now();
let target_sources = find_move_filenames(&options.move_sources, true)?;
let all_sources = collect_all_sources(
&target_sources,
&find_move_filenames(&options.move_deps, true)?,
options.inv_v2,
)?;
let other_sources = remove_sources(&target_sources, all_sources);
let address = Some(options.account_address.as_ref());
debug!("parsing and checking sources");
let mut env: GlobalEnv = run_model_builder(target_sources, other_sources, address)?;
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with checking errors"));
}
if options.prover.report_warnings && env.has_warnings() {
env.report_warnings(error_writer);
}
// Add the prover options as an extension to the environment, so they can be accessed
// from there.
env.set_extension(options.prover.clone());
// Until this point, prover and docgen have the same code. Here we part ways.
if options.run_docgen {
return run_docgen(&env, &options, error_writer, now);
}
// Same for ABI generator.
if options.run_abigen {
return run_abigen(&env, &options, now);
}
// Same for the error map generator
if options.run_errmapgen {
return Ok(run_errmapgen(&env, &options, now));
}
// Same for read/write set analysis
if options.run_read_write_set {
return Ok(run_read_write_set(&env, &options, now));
}
let targets = create_and_process_bytecode(&options, &env);
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with transformation errors"));
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with modifies checking errors"));
}
// Analyze and find out the set of modules/functions to be translated and/or verified.
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with analysis errors"));
}
let writer = CodeWriter::new(env.internal_loc());
add_prelude(&options, &writer)?;
let mut translator = BoogieTranslator::new(&env, &options.backend, &targets, &writer);
translator.translate();
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie generation errors"));
}
let output_existed = std::path::Path::new(&options.output_path).exists();
debug!("writing boogie to `{}`", &options.output_path);
writer.process_result(|result| fs::write(&options.output_path, result))?;
let translator_elapsed = now.elapsed();
if !options.prover.generate_only {
let boogie_file_id =
writer.process_result(|result| env.add_source(&options.output_path, result, false));
let boogie = BoogieWrapper {
env: &env,
targets: &targets,
writer: &writer,
options: &options.backend,
boogie_file_id,
};
boogie.call_boogie_and_verify_output(options.backend.bench_repeat, &options.output_path)?;
let boogie_elapsed = now.elapsed();
if options.backend.bench_repeat <= 1 {
info!(
"{:.3}s translator, {:.3}s solver",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
);
} else {
info!(
"{:.3}s translator, {:.3}s solver (average over {} solver runs)",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
/ (options.backend.bench_repeat as f64),
options.backend.bench_repeat
);
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie verification errors"));
}
}
if !output_existed && !options.backend.keep_artifacts {
std::fs::remove_file(&options.output_path).unwrap_or_default();
}
Ok(())
}
pub fn run_move_prover_errors_to_stderr(options: Options) -> anyhow::Result<()> {
let mut error_writer = StandardStream::stderr(ColorChoice::Auto);
run_move_prover(&mut error_writer, options)
}
fn run_docgen<W: WriteColor>(
env: &GlobalEnv,
options: &Options,
error_writer: &mut W,
now: Instant,
) -> anyhow::Result<()> {
let generator = Docgen::new(env, &options.docgen);
let checking_elapsed = now.elapsed();
info!("generating documentation");
for (file, content) in generator.gen() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
if env.has_errors() {
env.report_errors(error_writer);
Err(anyhow!("exiting with documentation generation errors"))
} else {
Ok(())
}
}
fn run_abigen(env: &GlobalEnv, options: &Options, now: Instant) -> anyhow::Result<()> {
let mut generator = Abigen::new(env, &options.abigen);
let checking_elapsed = now.elapsed();
info!("generating ABI files");
generator.gen();
for (file, content) in generator.into_result() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
Ok(())
}
fn run_errmapgen(env: &GlobalEnv, options: &Options, now: Instant) {
let mut generator = ErrmapGen::new(env, &options.errmapgen);
let checking_elapsed = now.elapsed();
info!("generating error map");
generator.gen();
generator.save_result();
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
}
fn run_read_write_set(env: &GlobalEnv, options: &Options, now: Instant) {
let mut targets = FunctionTargetsHolder::default();
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
let mut pipeline = FunctionTargetPipeline::default();
pipeline.add_processor(ReadWriteSetProcessor::new());
let start = now.elapsed();
info!("generating read/write set");
pipeline.run(env, &mut targets, None);
read_write_set_analysis::get_read_write_set(env, &targets);
println!("generated for {:?}", options.move_sources);
let end = now.elapsed();
info!("{:.3}s analyzing", (end - start).as_secs_f64());
}
/// Adds the prelude to the generated output.
fn add_prelude(options: &Options, writer: &CodeWriter) -> anyhow::Result<()> {
emit!(writer, "\n// ** prelude from {}\n\n", &options.prelude_path);
let content = if options.prelude_path == INLINE_PRELUDE {
debug!("using inline prelude");
String::from_utf8_lossy(DEFAULT_PRELUDE).to_string()
} else {
debug!("using prelude at {}", &options.prelude_path);
fs::read_to_string(&options.prelude_path)?
};
let mut handlebars = Handlebars::new();
handlebars.register_helper(
"stratified",
Box::new(StratificationHelper::new(
options.backend.stratification_depth,
)),
);
let expanded_content = handlebars.render_template(&content, &options)?;
emitln!(writer, &expanded_content);
Ok(())
}
/// Create bytecode and process it.
fn create_and_process_bytecode(options: &Options, env: &GlobalEnv) -> FunctionTargetsHolder {
let mut targets = FunctionTargetsHolder::default();
// Add function targets for all functions in the environment.
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
// Create processing pipeline and run it.
let pipeline = create_bytecode_processing_pipeline(options);
let dump_file = if options.prover.dump_bytecode {
Some(
options
.move_sources
.get(0)
.cloned()
.unwrap_or_else(|| "bytecode".to_string())
.replace(".move", ""),
)
} else {
None
};
pipeline.run(env, &mut targets, dump_file);
targets
}
/// Creates the bytecode transformation pipeline.
fn create_bytecode_processing_pipeline(options: &Options) -> FunctionTargetPipeline {
let mut res = FunctionTargetPipeline::default();
// Add processors in the order they are executed.
res.add_processor(DebugInstrumenter::new());
pipelines::pipelines(options)
.into_iter()
.for_each(|processor| res.add_processor(processor));
res.add_processor(SpecInstrumentationProcessor::new());
res.add_processor(DataInvariantInstrumentationProcessor::new());
if options.inv_v2 {
// *** convert to v2 version ***
res.add_processor(GlobalInvariantInstrumentationProcessorV2::new());
} else {
res.add_processor(GlobalInvariantInstrumentationProcessor::new());
}
res
}
/// Remove the target Move files from the list of files.
fn remove_sources(sources: &[String], all_files: Vec<String>) -> Vec<String> {
let canonical_sources = sources
.iter()
.map(|s| canonicalize(s))
.collect::<BTreeSet<_>>();
all_files
.into_iter()
.filter(|d| !canonical_sources.contains(&canonicalize(d)))
.collect_vec()
}
/// Collect all the relevant Move sources among the sources represented by the
/// `input_deps` parameter. The resulting vector of sources includes target sources,
/// dependencies of target sources, (recursive) friends of targets and dependencies,
/// and dependencies of friends.
fn collect_all_sources(
target_sources: &[String],
input_deps: &[String],
use_inv_v2: bool,
) -> anyhow::Result<Vec<String>> {
let mut all_sources = target_sources.to_vec();
static DEP_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)use\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static NEW_FRIEND_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)friend\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static FRIEND_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?m)pragma\s*friend\s*=\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap()
});
let target_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(target_deps);
let friend_sources = calculate_deps(
&all_sources,
input_deps,
if use_inv_v2 {
&NEW_FRIEND_REGEX
} else
|
,
)?;
all_sources.extend(friend_sources);
let friend_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(friend_deps);
Ok(all_sources)
}
/// Calculates transitive dependencies of the given Move sources. This function
/// is also used to calculate transitive friends depending on the regex provided
/// for extracting matches.
fn calculate_deps(
sources: &[String],
input_deps: &[String],
regex: &Regex,
) -> anyhow::Result<Vec<String>> {
let file_map = calculate_file_map(input_deps)?;
let mut deps = vec![];
let mut visited = BTreeSet::new();
for src in sources.iter() {
calculate_deps_recursively(Path::new(src), &file_map, &mut visited, &mut deps, regex)?;
}
// Remove input sources from deps. They can end up here because our dependency
// analysis is an over-approximation and, for example, cannot distinguish between
// references inside and outside of comments.
let mut deps = remove_sources(sources, deps);
// Sort deps by simple file name. Sorting is important because different orders,
// caused by the platform-dependent ways in which `calculate_deps_recursively` may
// return values, can change the behavior of the SMT solver (butterfly effect). By
// using the simple file name we abstract away from where the sources live in the
// file system. Since Move has no namespaces and file names can be expected to match
// unique module/script names, this should work in most cases.
deps.sort_by(|a, b| {
let fa = PathBuf::from(a);
let fb = PathBuf::from(b);
Ord::cmp(fa.file_name().unwrap(), fb.file_name().unwrap())
});
Ok(deps)
}
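// Illustrative effect of the sort above (derived from the code, not from the
// original comments): ["/a/m2.move", "/b/m1.move"] sorts to
// ["/b/m1.move", "/a/m2.move"], since only the simple file names "m1.move" and
// "m2.move" are compared.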
fn canonicalize(s: &str) -> String {
match fs::canonicalize(s) {
Ok(p) => p.to_string_lossy().to_string(),
Err(_) => s.to_string(),
}
}
/// Recursively calculate dependencies.
fn calculate_deps_recursively(
path: &Path,
file_map: &BTreeMap<String, PathBuf>,
visited: &mut BTreeSet<String>,
deps: &mut Vec<String>,
regex: &Regex,
) -> anyhow::Result<()> {
if !visited.insert(path.to_string_lossy().to_string()) {
return Ok(());
}
debug!("including `{}`", path.display());
for dep in extract_matches(path, regex)? {
if let Some(dep_path) = file_map.get(&dep) {
let dep_str = dep_path.to_string_lossy().to_string();
if !deps.contains(&dep_str) {
deps.push(dep_str);
calculate_deps_recursively(dep_path.as_path(), file_map, visited, deps, regex)?;
}
}
}
Ok(())
}
/// Calculate a map of module names to files which define those modules.
fn calculate_file_map(deps: &[String]) -> anyhow::Result<BTreeMap<String, PathBuf>> {
static REX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)module\s+(\w+)\s*\{").unwrap());
let mut module_to_file = BTreeMap::new();
for dep in deps {
let dep_path = PathBuf::from(dep);
for module in extract_matches(dep_path.as_path(), &*REX)? {
module_to_file.insert(module, dep_path.clone());
}
}
Ok(module_to_file)
}
/// Extracts matches from a text file. `rex` must be a regular expression with a
/// single (unnamed) capture group.
fn extract_matches(path: &Path, rex: &Regex) -> anyhow::Result<Vec<String>> {
let mut content = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut content)?;
let mut at = 0;
let mut res = vec![];
while let Some(cap) = rex.captures(&content[at..]) {
res.push(cap.get(1).unwrap().as_str().to_string());
at += cap.get(0).unwrap().end();
}
Ok(res)
}
|
{
&FRIEND_REGEX
}
|
conditional_block
|
lib.rs
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
cli::{Options, INLINE_PRELUDE},
prelude_template_helpers::StratificationHelper,
};
use abigen::Abigen;
use anyhow::anyhow;
use boogie_backend::{boogie_wrapper::BoogieWrapper, bytecode_translator::BoogieTranslator};
use bytecode::{
data_invariant_instrumentation::DataInvariantInstrumentationProcessor,
debug_instrumentation::DebugInstrumenter,
function_target_pipeline::{FunctionTargetPipeline, FunctionTargetsHolder},
global_invariant_instrumentation::GlobalInvariantInstrumentationProcessor,
global_invariant_instrumentation_v2::GlobalInvariantInstrumentationProcessorV2,
read_write_set_analysis::{self, ReadWriteSetProcessor},
spec_instrumentation::SpecInstrumentationProcessor,
};
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream, WriteColor};
use docgen::Docgen;
use errmapgen::ErrmapGen;
use handlebars::Handlebars;
use itertools::Itertools;
#[allow(unused_imports)]
use log::{debug, info, warn};
use move_lang::find_move_filenames;
use move_model::{code_writer::CodeWriter, emit, emitln, model::GlobalEnv, run_model_builder};
use once_cell::sync::Lazy;
use regex::Regex;
use std::{
collections::{BTreeMap, BTreeSet},
fs,
fs::File,
io::Read,
path::{Path, PathBuf},
time::Instant,
};
pub mod cli;
mod pipelines;
mod prelude_template_helpers;
// =================================================================================================
// Entry Point
/// Content of the default prelude.
const DEFAULT_PRELUDE: &[u8] = include_bytes!("prelude.bpl");
pub fn run_move_prover<W: WriteColor>(
error_writer: &mut W,
options: Options,
) -> anyhow::Result<()> {
let now = Instant::now();
let target_sources = find_move_filenames(&options.move_sources, true)?;
let all_sources = collect_all_sources(
&target_sources,
&find_move_filenames(&options.move_deps, true)?,
options.inv_v2,
)?;
let other_sources = remove_sources(&target_sources, all_sources);
let address = Some(options.account_address.as_ref());
debug!("parsing and checking sources");
let mut env: GlobalEnv = run_model_builder(target_sources, other_sources, address)?;
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with checking errors"));
}
if options.prover.report_warnings && env.has_warnings() {
env.report_warnings(error_writer);
}
// Add the prover options as an extension to the environment, so they can be accessed
// from there.
env.set_extension(options.prover.clone());
// Until this point, prover and docgen have the same code. Here we part ways.
if options.run_docgen {
return run_docgen(&env, &options, error_writer, now);
}
// Same for ABI generator.
if options.run_abigen {
return run_abigen(&env, &options, now);
|
return Ok(run_errmapgen(&env, &options, now));
}
// Same for read/write set analysis
if options.run_read_write_set {
return Ok(run_read_write_set(&env, &options, now));
}
let targets = create_and_process_bytecode(&options, &env);
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with transformation errors"));
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with modifies checking errors"));
}
// Analyze and find out the set of modules/functions to be translated and/or verified.
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with analysis errors"));
}
let writer = CodeWriter::new(env.internal_loc());
add_prelude(&options, &writer)?;
let mut translator = BoogieTranslator::new(&env, &options.backend, &targets, &writer);
translator.translate();
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie generation errors"));
}
let output_existed = std::path::Path::new(&options.output_path).exists();
debug!("writing boogie to `{}`", &options.output_path);
writer.process_result(|result| fs::write(&options.output_path, result))?;
let translator_elapsed = now.elapsed();
if !options.prover.generate_only {
let boogie_file_id =
writer.process_result(|result| env.add_source(&options.output_path, result, false));
let boogie = BoogieWrapper {
env: &env,
targets: &targets,
writer: &writer,
options: &options.backend,
boogie_file_id,
};
boogie.call_boogie_and_verify_output(options.backend.bench_repeat, &options.output_path)?;
let boogie_elapsed = now.elapsed();
if options.backend.bench_repeat <= 1 {
info!(
"{:.3}s translator, {:.3}s solver",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
);
} else {
info!(
"{:.3}s translator, {:.3}s solver (average over {} solver runs)",
translator_elapsed.as_secs_f64(),
(boogie_elapsed - translator_elapsed).as_secs_f64()
/ (options.backend.bench_repeat as f64),
options.backend.bench_repeat
);
}
if env.has_errors() {
env.report_errors(error_writer);
return Err(anyhow!("exiting with boogie verification errors"));
}
}
if !output_existed && !options.backend.keep_artifacts {
std::fs::remove_file(&options.output_path).unwrap_or_default();
}
Ok(())
}
pub fn run_move_prover_errors_to_stderr(options: Options) -> anyhow::Result<()> {
let mut error_writer = StandardStream::stderr(ColorChoice::Auto);
run_move_prover(&mut error_writer, options)
}
fn run_docgen<W: WriteColor>(
env: &GlobalEnv,
options: &Options,
error_writer: &mut W,
now: Instant,
) -> anyhow::Result<()> {
let generator = Docgen::new(env, &options.docgen);
let checking_elapsed = now.elapsed();
info!("generating documentation");
for (file, content) in generator.gen() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
if env.has_errors() {
env.report_errors(error_writer);
Err(anyhow!("exiting with documentation generation errors"))
} else {
Ok(())
}
}
fn run_abigen(env: &GlobalEnv, options: &Options, now: Instant) -> anyhow::Result<()> {
let mut generator = Abigen::new(env, &options.abigen);
let checking_elapsed = now.elapsed();
info!("generating ABI files");
generator.gen();
for (file, content) in generator.into_result() {
let path = PathBuf::from(&file);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(path.as_path(), content)?;
}
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
Ok(())
}
fn run_errmapgen(env: &GlobalEnv, options: &Options, now: Instant) {
let mut generator = ErrmapGen::new(env, &options.errmapgen);
let checking_elapsed = now.elapsed();
info!("generating error map");
generator.gen();
generator.save_result();
let generating_elapsed = now.elapsed();
info!(
"{:.3}s checking, {:.3}s generating",
checking_elapsed.as_secs_f64(),
(generating_elapsed - checking_elapsed).as_secs_f64()
);
}
fn run_read_write_set(env: &GlobalEnv, options: &Options, now: Instant) {
let mut targets = FunctionTargetsHolder::default();
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
let mut pipeline = FunctionTargetPipeline::default();
pipeline.add_processor(ReadWriteSetProcessor::new());
let start = now.elapsed();
info!("generating read/write set");
pipeline.run(env, &mut targets, None);
read_write_set_analysis::get_read_write_set(env, &targets);
println!("generated for {:?}", options.move_sources);
let end = now.elapsed();
info!("{:.3}s analyzing", (end - start).as_secs_f64());
}
/// Adds the prelude to the generated output.
fn add_prelude(options: &Options, writer: &CodeWriter) -> anyhow::Result<()> {
emit!(writer, "\n// ** prelude from {}\n\n", &options.prelude_path);
let content = if options.prelude_path == INLINE_PRELUDE {
debug!("using inline prelude");
String::from_utf8_lossy(DEFAULT_PRELUDE).to_string()
} else {
debug!("using prelude at {}", &options.prelude_path);
fs::read_to_string(&options.prelude_path)?
};
let mut handlebars = Handlebars::new();
handlebars.register_helper(
"stratified",
Box::new(StratificationHelper::new(
options.backend.stratification_depth,
)),
);
let expanded_content = handlebars.render_template(&content, &options)?;
emitln!(writer, &expanded_content);
Ok(())
}
/// Create bytecode and process it.
fn create_and_process_bytecode(options: &Options, env: &GlobalEnv) -> FunctionTargetsHolder {
let mut targets = FunctionTargetsHolder::default();
// Add function targets for all functions in the environment.
for module_env in env.get_modules() {
for func_env in module_env.get_functions() {
targets.add_target(&func_env)
}
}
// Create processing pipeline and run it.
let pipeline = create_bytecode_processing_pipeline(options);
let dump_file = if options.prover.dump_bytecode {
Some(
options
.move_sources
.get(0)
.cloned()
.unwrap_or_else(|| "bytecode".to_string())
.replace(".move", ""),
)
} else {
None
};
pipeline.run(env, &mut targets, dump_file);
targets
}
/// Creates the bytecode transformation pipeline.
fn create_bytecode_processing_pipeline(options: &Options) -> FunctionTargetPipeline {
let mut res = FunctionTargetPipeline::default();
// Add processors in the order they are executed.
res.add_processor(DebugInstrumenter::new());
pipelines::pipelines(options)
.into_iter()
.for_each(|processor| res.add_processor(processor));
res.add_processor(SpecInstrumentationProcessor::new());
res.add_processor(DataInvariantInstrumentationProcessor::new());
if options.inv_v2 {
// *** convert to v2 version ***
res.add_processor(GlobalInvariantInstrumentationProcessorV2::new());
} else {
res.add_processor(GlobalInvariantInstrumentationProcessor::new());
}
res
}
/// Remove the target Move files from the list of files.
fn remove_sources(sources: &[String], all_files: Vec<String>) -> Vec<String> {
let canonical_sources = sources
.iter()
.map(|s| canonicalize(s))
.collect::<BTreeSet<_>>();
all_files
.into_iter()
.filter(|d| !canonical_sources.contains(&canonicalize(d)))
.collect_vec()
}
/// Collect all the relevant Move sources among the sources represented by the
/// `input_deps` parameter. The resulting vector of sources includes target sources,
/// dependencies of target sources, (recursive) friends of targets and dependencies,
/// and dependencies of friends.
fn collect_all_sources(
target_sources: &[String],
input_deps: &[String],
use_inv_v2: bool,
) -> anyhow::Result<Vec<String>> {
let mut all_sources = target_sources.to_vec();
static DEP_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)use\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static NEW_FRIEND_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?m)friend\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap());
static FRIEND_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?m)pragma\s*friend\s*=\s*0x[0-9abcdefABCDEF]+::\s*(\w+)").unwrap()
});
let target_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(target_deps);
let friend_sources = calculate_deps(
&all_sources,
input_deps,
if use_inv_v2 {
&NEW_FRIEND_REGEX
} else {
&FRIEND_REGEX
},
)?;
all_sources.extend(friend_sources);
let friend_deps = calculate_deps(&all_sources, input_deps, &DEP_REGEX)?;
all_sources.extend(friend_deps);
Ok(all_sources)
}
/// Calculates transitive dependencies of the given Move sources. This function
/// is also used to calculate transitive friends depending on the regex provided
/// for extracting matches.
fn calculate_deps(
sources: &[String],
input_deps: &[String],
regex: &Regex,
) -> anyhow::Result<Vec<String>> {
let file_map = calculate_file_map(input_deps)?;
let mut deps = vec![];
let mut visited = BTreeSet::new();
for src in sources.iter() {
calculate_deps_recursively(Path::new(src), &file_map, &mut visited, &mut deps, regex)?;
}
// Remove input sources from deps. They can end up here because our dependency
// analysis is an over-approximation and, for example, cannot distinguish between
// references inside and outside of comments.
let mut deps = remove_sources(sources, deps);
// Sort deps by simple file name. Sorting is important because different orders,
// caused by the platform-dependent ways in which `calculate_deps_recursively` may
// return values, can change the behavior of the SMT solver (butterfly effect). By
// using the simple file name we abstract away from where the sources live in the
// file system. Since Move has no namespaces and file names can be expected to match
// unique module/script names, this should work in most cases.
deps.sort_by(|a, b| {
let fa = PathBuf::from(a);
let fb = PathBuf::from(b);
Ord::cmp(fa.file_name().unwrap(), fb.file_name().unwrap())
});
Ok(deps)
}
fn canonicalize(s: &str) -> String {
match fs::canonicalize(s) {
Ok(p) => p.to_string_lossy().to_string(),
Err(_) => s.to_string(),
}
}
/// Recursively calculate dependencies.
fn calculate_deps_recursively(
path: &Path,
file_map: &BTreeMap<String, PathBuf>,
visited: &mut BTreeSet<String>,
deps: &mut Vec<String>,
regex: &Regex,
) -> anyhow::Result<()> {
if !visited.insert(path.to_string_lossy().to_string()) {
return Ok(());
}
debug!("including `{}`", path.display());
for dep in extract_matches(path, regex)? {
if let Some(dep_path) = file_map.get(&dep) {
let dep_str = dep_path.to_string_lossy().to_string();
if !deps.contains(&dep_str) {
deps.push(dep_str);
calculate_deps_recursively(dep_path.as_path(), file_map, visited, deps, regex)?;
}
}
}
Ok(())
}
/// Calculate a map of module names to files which define those modules.
fn calculate_file_map(deps: &[String]) -> anyhow::Result<BTreeMap<String, PathBuf>> {
static REX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?m)module\s+(\w+)\s*\{").unwrap());
let mut module_to_file = BTreeMap::new();
for dep in deps {
let dep_path = PathBuf::from(dep);
for module in extract_matches(dep_path.as_path(), &*REX)? {
module_to_file.insert(module, dep_path.clone());
}
}
Ok(module_to_file)
}
/// Extracts matches from a text file. `rex` must be a regular expression with a
/// single (unnamed) capture group.
fn extract_matches(path: &Path, rex: &Regex) -> anyhow::Result<Vec<String>> {
let mut content = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut content)?;
let mut at = 0;
let mut res = vec![];
while let Some(cap) = rex.captures(&content[at..]) {
res.push(cap.get(1).unwrap().as_str().to_string());
at += cap.get(0).unwrap().end();
}
Ok(res)
}
|
}
// Same for the error map generator
if options.run_errmapgen {
|
random_line_split
|
routes.rs
|
//! # Request handlers
//!
//! Iron's [`router`]() will accept a function or closure that takes
//! a mutable `Request` and returns an `IronResult<Response>`.
//! The application specifies two handlers:
//!
//! - `get_person` handles `GET /person/:id`, and will get a `Person`
//! from Redis and return them as json.
//! - `post_person` handles `POST /person/:id`, and will update a
//! `Person` in Redis with a new name.
use serde_json;
use redis::{self, Commands};
use iron::prelude::*;
use iron::status;
use router::Router;
use errors::*;
use model::*;
/// Get a person by id.
///
/// This handler takes an id from the route parameters and gets
/// the corresponding person, or returns an `HTTP 404`.
pub fn get_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person_data = get_person_data(conn, &id)?;
Ok(Response::with((status::Ok, person_data)))
}
#[derive(Deserialize)]
struct PostPersonCommand {
pub name: String,
}
/// Post a new person value for an id.
///
/// This handler takes an id and `PostPersonCommand` and adds or updates
/// that person's data.
///
/// The body of the request should look something like:
///
/// ```json
/// { "name": "Some Name" }
/// ```
pub fn post_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person = make_person(req, id)?;
set_person_data(conn, person)?;
Ok(Response::with(status::Ok))
}
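// Illustrative round trip (assumed wiring; the router setup lives outside this
// file, and the exact JSON shape of `Person` depends on the `model` module):
//   POST /person/1 with body {"name": "Ada"} -> 200 OK, stores the person
//   GET  /person/1                           -> 200 OK with the stored JSON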
/// Get an `Id` from the request URL params.
fn get_id(req: &Request) -> Result<Id> {
req.extensions
.get::<Router>()
.unwrap()
.find("id")
.unwrap_or("")
.try_into()
}
/// Get a new Redis connection.
fn
|
() -> Result<redis::Connection> {
let client = redis::Client::open("redis://127.0.0.1/")?;
client.get_connection().map_err(|e| e.into())
}
/// Get the data for a `Person` from Redis.
fn get_person_data(conn: redis::Connection, id: &Id) -> Result<String> {
let person_data: Option<String> = conn.get(id.as_ref())?;
person_data.ok_or(Error::from(ErrorKind::PersonNotFound))
}
/// Set the data for a `Person` in Redis.
fn set_person_data(conn: redis::Connection, person: Person) -> Result<()> {
let person_data = serde_json::to_string(&person)?;
conn.set(person.id.as_ref(), person_data)?;
Ok(())
}
/// Get a person from the request body with an id.
fn make_person(req: &mut Request, id: Id) -> Result<Person> {
let cmd: PostPersonCommand = serde_json::from_reader(&mut req.body)?;
Ok(Person {
id: id,
name: cmd.name,
})
}
|
get_conn
|
identifier_name
|
routes.rs
|
//! # Request handlers
//!
//! Iron's [`router`]() will accept a function or closure that takes
//! a mutable `Request` and returns an `IronResult<Response>`.
//! The application specifies two handlers:
//!
//! - `get_person` handles `GET /person/:id`, and will get a `Person`
|
use serde_json;
use redis::{self, Commands};
use iron::prelude::*;
use iron::status;
use router::Router;
use errors::*;
use model::*;
/// Get a person by id.
///
/// This handler takes an id from the route parameters and gets
/// the corresponding person, or returns an `HTTP 404`.
pub fn get_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person_data = get_person_data(conn, &id)?;
Ok(Response::with((status::Ok, person_data)))
}
#[derive(Deserialize)]
struct PostPersonCommand {
pub name: String,
}
/// Post a new person value for an id.
///
/// This handler takes an id and `PostPersonCommand` and adds or updates
/// that person's data.
///
/// The body of the request should look something like:
///
/// ```json
/// { "name": "Some Name" }
/// ```
pub fn post_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person = make_person(req, id)?;
set_person_data(conn, person)?;
Ok(Response::with(status::Ok))
}
/// Get an `Id` from the request URL params.
fn get_id(req: &Request) -> Result<Id> {
req.extensions
.get::<Router>()
.unwrap()
.find("id")
.unwrap_or("")
.try_into()
}
/// Get a new Redis connection.
fn get_conn() -> Result<redis::Connection> {
let client = redis::Client::open("redis://127.0.0.1/")?;
client.get_connection().map_err(|e| e.into())
}
/// Get the data for a `Person` from Redis.
fn get_person_data(conn: redis::Connection, id: &Id) -> Result<String> {
let person_data: Option<String> = conn.get(id.as_ref())?;
person_data.ok_or(Error::from(ErrorKind::PersonNotFound))
}
/// Set the data for a `Person` in Redis.
fn set_person_data(conn: redis::Connection, person: Person) -> Result<()> {
let person_data = serde_json::to_string(&person)?;
conn.set(person.id.as_ref(), person_data)?;
Ok(())
}
/// Get a person from the request body with an id.
fn make_person(req: &mut Request, id: Id) -> Result<Person> {
let cmd: PostPersonCommand = serde_json::from_reader(&mut req.body)?;
Ok(Person {
id: id,
name: cmd.name,
})
}
|
//! from Redis and return them as json.
//! - `post_person` handles `POST /person/:id`, and will update a
//! `Person` in Redis with a new name.
|
random_line_split
|
routes.rs
|
//! # Request handlers
//!
//! Iron's [`router`]() will accept a function or closure that takes
//! a mutable `Request` and returns an `IronResult<Response>`.
//! The application specifies two handlers:
//!
//! - `get_person` handles `GET /person/:id`, and will get a `Person`
//! from Redis and return them as json.
//! - `post_person` handles `POST /person/:id`, and will update a
//! `Person` in Redis with a new name.
use serde_json;
use redis::{self, Commands};
use iron::prelude::*;
use iron::status;
use router::Router;
use errors::*;
use model::*;
/// Get a person by id.
///
/// This handler takes an id from the route parameters and gets
/// the corresponding person, or returns an `HTTP 404`.
pub fn get_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person_data = get_person_data(conn, &id)?;
Ok(Response::with((status::Ok, person_data)))
}
#[derive(Deserialize)]
struct PostPersonCommand {
pub name: String,
}
/// Post a new person value for an id.
///
/// This handler takes an id and `PostPersonCommand` and adds or updates
/// that person's data.
///
/// The body of the request should look something like:
///
/// ```json
/// { "name": "Some Name" }
/// ```
pub fn post_person(req: &mut Request) -> IronResult<Response> {
let id = get_id(&req)?;
let conn = get_conn()?;
let person = make_person(req, id)?;
set_person_data(conn, person)?;
Ok(Response::with(status::Ok))
}
/// Get an `Id` from the request URL params.
fn get_id(req: &Request) -> Result<Id> {
req.extensions
.get::<Router>()
.unwrap()
.find("id")
.unwrap_or("")
.try_into()
}
/// Get a new Redis connection.
fn get_conn() -> Result<redis::Connection> {
let client = redis::Client::open("redis://127.0.0.1/")?;
client.get_connection().map_err(|e| e.into())
}
/// Get the data for a `Person` from Redis.
fn get_person_data(conn: redis::Connection, id: &Id) -> Result<String>
|
/// Set the data for a `Person` in Redis.
fn set_person_data(conn: redis::Connection, person: Person) -> Result<()> {
let person_data = serde_json::to_string(&person)?;
conn.set(person.id.as_ref(), person_data)?;
Ok(())
}
/// Get a person from the request body with an id.
fn make_person(req: &mut Request, id: Id) -> Result<Person> {
let cmd: PostPersonCommand = serde_json::from_reader(&mut req.body)?;
Ok(Person {
id: id,
name: cmd.name,
})
}
|
{
let person_data: Option<String> = conn.get(id.as_ref())?;
person_data.ok_or(Error::from(ErrorKind::PersonNotFound))
}
|
identifier_body
|
bytes.rs
|
struct Unit<'a> {
value: f32,
name: &'a str,
}
impl<'a> Unit<'a> {
const fn new(value: f32, name: &str) -> Unit {
Unit { value, name }
}
}
const BYTE_UNITS: [Unit; 6] = [
Unit::new(1e3, "k"),
Unit::new(1e6, "M"),
Unit::new(1e9, "G"),
Unit::new(1e12, "T"),
Unit::new(1e15, "P"),
Unit::new(1e18, "E"),
];
pub fn format_bytes(bytes: u64) -> String {
let bytes = bytes as f32;
let unit = BYTE_UNITS
.iter()
.take_while(|unit| unit.value <= bytes)
.last()
.unwrap_or(&BYTE_UNITS[0]);
let value = bytes / unit.value;
let n_decimals = if value < 10.0 {
2
} else if value < 100.0 {
1
} else {
0
};
let decimal_point = if n_decimals == 0 { "." } else
|
;
format!(
"{:.*}{} {}",
n_decimals,
bytes / unit.value,
decimal_point,
unit.name
)
}
|
{ "" }
|
conditional_block
|
bytes.rs
|
struct
|
<'a> {
value: f32,
name: &'a str,
}
impl<'a> Unit<'a> {
const fn new(value: f32, name: &str) -> Unit {
Unit { value, name }
}
}
const BYTE_UNITS: [Unit; 6] = [
Unit::new(1e3, "k"),
Unit::new(1e6, "M"),
Unit::new(1e9, "G"),
Unit::new(1e12, "T"),
Unit::new(1e15, "P"),
Unit::new(1e18, "E"),
];
pub fn format_bytes(bytes: u64) -> String {
let bytes = bytes as f32;
let unit = BYTE_UNITS
.iter()
.take_while(|unit| unit.value <= bytes)
.last()
.unwrap_or(&BYTE_UNITS[0]);
let value = bytes / unit.value;
let n_decimals = if value < 10.0 {
2
} else if value < 100.0 {
1
} else {
0
};
let decimal_point = if n_decimals == 0 { "." } else { "" };
format!(
"{:.*}{} {}",
n_decimals,
bytes / unit.value,
decimal_point,
unit.name
)
}
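// Illustrative outputs (worked out from the code above, not part of the original
// file): format_bytes(1_500) == "1.50 k"; format_bytes(123_456_789) == "123. M"
// (the trailing '.' is `decimal_point`, emitted when n_decimals is 0); and
// format_bytes(500) == "0.50 k", because values below 1e3 fall back to the
// first unit.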
|
Unit
|
identifier_name
|
bytes.rs
|
struct Unit<'a> {
value: f32,
name: &'a str,
}
impl<'a> Unit<'a> {
const fn new(value: f32, name: &str) -> Unit {
Unit { value, name }
}
}
const BYTE_UNITS: [Unit; 6] = [
Unit::new(1e3, "k"),
Unit::new(1e6, "M"),
Unit::new(1e9, "G"),
Unit::new(1e12, "T"),
Unit::new(1e15, "P"),
Unit::new(1e18, "E"),
];
pub fn format_bytes(bytes: u64) -> String {
let bytes = bytes as f32;
let unit = BYTE_UNITS
.iter()
.take_while(|unit| unit.value <= bytes)
.last()
.unwrap_or(&BYTE_UNITS[0]);
let value = bytes / unit.value;
let n_decimals = if value < 10.0 {
2
} else if value < 100.0 {
1
} else {
0
};
let decimal_point = if n_decimals == 0 { "." } else { "" };
format!(
"{:.*}{} {}",
n_decimals,
bytes / unit.value,
|
decimal_point,
unit.name
)
}
|
random_line_split
|
|
bytes.rs
|
struct Unit<'a> {
value: f32,
name: &'a str,
}
impl<'a> Unit<'a> {
const fn new(value: f32, name: &str) -> Unit {
Unit { value, name }
}
}
const BYTE_UNITS: [Unit; 6] = [
Unit::new(1e3, "k"),
Unit::new(1e6, "M"),
Unit::new(1e9, "G"),
Unit::new(1e12, "T"),
Unit::new(1e15, "P"),
Unit::new(1e18, "E"),
];
pub fn format_bytes(bytes: u64) -> String
|
bytes / unit.value,
decimal_point,
unit.name
)
}
|
{
let bytes = bytes as f32;
let unit = BYTE_UNITS
.iter()
.take_while(|unit| unit.value <= bytes)
.last()
.unwrap_or(&BYTE_UNITS[0]);
let value = bytes / unit.value;
let n_decimals = if value < 10.0 {
2
} else if value < 100.0 {
1
} else {
0
};
let decimal_point = if n_decimals == 0 { "." } else { "" };
format!(
"{:.*}{} {}",
n_decimals,
|
identifier_body
|
macros.rs
|
//! Various macros
/// Generates an integer type `Name` where computations are done modulo `m`
#[macro_export]
macro_rules! define_modulo_ring(
($Name:ident, $m:expr) => (
#[deriving(PartialEq,Clone)]
struct $Name {
value: i64
}
impl $Name {
fn new(v: i64) -> $Name {
let x=v%$m;
if x < 0 {
$Name { value: x + $m }
} else {
$Name { value: x }
}
}
}
impl core::fmt::Show for $Name {
fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(fmt, "{} (mod {})", self.value, $m)
}
}
impl Add<$Name,$Name> for $Name {
fn add(&self, other: &$Name) -> $Name {
$Name { value: (self.value + other.value)%$m }
}
}
impl Sub<$Name,$Name> for $Name {
fn sub(&self, other: &$Name) -> $Name {
$Name { value: (self.value - other.value)%$m }
}
}
impl Mul<$Name,$Name> for $Name {
fn mul(&self, other: &$Name) -> $Name {
$Name { value: (self.value*other.value)%$m }
}
}
impl Div<$Name,$Name> for $Name {
fn div(&self, other: &$Name) -> $Name {
let inv=algoritmoi::moduli::invert_mod(other.value, $m);
match inv {
Some(i) => $Name::new(self.value*i),
_ => fail!("Trying to divide by {} modulo {}", other.value, $m)
}
}
}
impl Neg<$Name> for $Name {
fn neg(&self) -> $Name {
$Name { value: $m - self.value }
}
}
impl core::num::One for $Name {
fn one() -> $Name {
$Name { value: core::num::One::one() }
}
}
impl core::num::Zero for $Name {
fn zero() -> $Name {
$Name { value: core::num::Zero::zero() }
}
fn is_zero(&self) -> bool {
self.value%$m == 0
}
}
impl FromPrimitive for $Name {
fn from_i64(n: i64) -> Option<$Name> {
Some($Name { value: n%$m })
}
fn from_u64(n: u64) -> Option<$Name> {
Some($Name { value: (n%$m) as i64 })
}
}
|
)
)
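// Illustrative usage (assumed; written against the pre-1.0 Rust dialect the
// macro above targets):
//   define_modulo_ring!(Z7, 7)
//   let x = Z7::new(5) + Z7::new(4); // x.value == 2, since 9 mod 7 == 2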
|
random_line_split
|
|
htmltrackelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTrackElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLTrackElement {
htmlelement: HTMLElement,
}
impl HTMLTrackElement {
|
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTrackElement> {
let element = HTMLTrackElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLTrackElementBinding::Wrap)
}
}
|
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLTrackElement {
HTMLTrackElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
|
random_line_split
|
htmltrackelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTrackElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLTrackElement {
htmlelement: HTMLElement,
}
impl HTMLTrackElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLTrackElement
|
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTrackElement> {
let element = HTMLTrackElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLTrackElementBinding::Wrap)
}
}
|
{
HTMLTrackElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
|
identifier_body
|
htmltrackelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTrackElementBinding;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use util::str::DOMString;
#[dom_struct]
pub struct
|
{
htmlelement: HTMLElement,
}
impl HTMLTrackElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document) -> HTMLTrackElement {
HTMLTrackElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTrackElement> {
let element = HTMLTrackElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLTrackElementBinding::Wrap)
}
}
|
HTMLTrackElement
|
identifier_name
|
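The three htmltrackelement.rs records above all carve holes out of Servo's two-step DOM construction pattern: a private new_inherited builds the plain struct, and a public new wraps it in a rooted, reflected node. A stand-alone sketch of that shape, with Box<T> as a hypothetical stand-in for Servo's Root<T> (the real types need the JS engine, so everything here is illustrative):

// Sketch of the new_inherited / new split.
struct HTMLElement { local_name: String }

struct HTMLTrackElement { htmlelement: HTMLElement }

impl HTMLTrackElement {
    // Builds the unrooted value; never exposed directly.
    fn new_inherited(local_name: String) -> HTMLTrackElement {
        HTMLTrackElement { htmlelement: HTMLElement { local_name } }
    }
    // Public constructor: allocate, "reflect", hand back an owned handle.
    // In Servo this is Node::reflect_node(box element, document, Wrap).
    fn new(local_name: String) -> Box<HTMLTrackElement> {
        let element = HTMLTrackElement::new_inherited(local_name);
        Box::new(element)
    }
}

fn main() {
    let track = HTMLTrackElement::new("track".to_string());
    assert_eq!(track.htmlelement.local_name, "track");
}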
htmlfieldsetelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding::HTMLFieldSetElementMethods;
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{MutNullableJS, Root};
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmlformelement::{FormControl, HTMLFormElement};
use dom::htmllegendelement::HTMLLegendElement;
use dom::node::{Node, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use std::default::Default;
use style::element_state::*;
#[dom_struct]
pub struct HTMLFieldSetElement {
htmlelement: HTMLElement,
form_owner: MutNullableJS<HTMLFormElement>,
}
impl HTMLFieldSetElement {
fn new_inherited(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> HTMLFieldSetElement {
HTMLFieldSetElement {
htmlelement:
HTMLElement::new_inherited_with_state(IN_ENABLED_STATE,
local_name, prefix, document),
form_owner: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> Root<HTMLFieldSetElement> {
Node::reflect_node(box HTMLFieldSetElement::new_inherited(local_name, prefix, document),
document,
HTMLFieldSetElementBinding::Wrap)
}
}
impl HTMLFieldSetElementMethods for HTMLFieldSetElement {
// https://html.spec.whatwg.org/multipage/#dom-fieldset-elements
fn
|
(&self) -> Root<HTMLCollection> {
#[derive(HeapSizeOf, JSTraceable)]
struct ElementsFilter;
impl CollectionFilter for ElementsFilter {
fn filter<'a>(&self, elem: &'a Element, _root: &'a Node) -> bool {
elem.downcast::<HTMLElement>()
.map_or(false, HTMLElement::is_listed_element)
}
}
let filter = box ElementsFilter;
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(&self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(&window, self.upcast())
}
// https://html.spec.whatwg.org/multipage/#dom-fieldset-disabled
make_bool_getter!(Disabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fieldset-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fae-form
fn GetForm(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner()
}
}
impl VirtualMethods for HTMLFieldSetElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&local_name!("disabled") => {
let disabled_state = match mutation {
AttributeMutation::Set(None) => true,
AttributeMutation::Set(Some(_)) => {
// Fieldset was already disabled before.
return;
},
AttributeMutation::Removed => false,
};
let node = self.upcast::<Node>();
let el = self.upcast::<Element>();
el.set_disabled_state(disabled_state);
el.set_enabled_state(!disabled_state);
let mut found_legend = false;
let children = node.children().filter(|node| {
if found_legend {
true
} else if node.is::<HTMLLegendElement>() {
found_legend = true;
false
} else {
true
}
});
let fields = children.flat_map(|child| {
child.traverse_preorder().filter(|descendant| {
match descendant.type_id() {
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLButtonElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLTextAreaElement)) => {
true
},
_ => false,
}
})
});
if disabled_state {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.set_disabled_state(true);
el.set_enabled_state(false);
}
} else {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.check_disabled_attribute();
el.check_ancestors_disabled_state_for_form_control();
}
}
},
&local_name!("form") => {
self.form_attribute_mutated(mutation);
},
_ => {},
}
}
}
impl FormControl for HTMLFieldSetElement {
fn form_owner(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner.get()
}
fn set_form_owner(&self, form: Option<&HTMLFormElement>) {
self.form_owner.set(form);
}
fn to_element<'a>(&'a self) -> &'a Element {
self.upcast::<Element>()
}
}
|
Elements
|
identifier_name
|
htmlfieldsetelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding::HTMLFieldSetElementMethods;
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{MutNullableJS, Root};
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmlformelement::{FormControl, HTMLFormElement};
use dom::htmllegendelement::HTMLLegendElement;
use dom::node::{Node, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use std::default::Default;
use style::element_state::*;
#[dom_struct]
pub struct HTMLFieldSetElement {
htmlelement: HTMLElement,
form_owner: MutNullableJS<HTMLFormElement>,
}
impl HTMLFieldSetElement {
fn new_inherited(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> HTMLFieldSetElement {
HTMLFieldSetElement {
htmlelement:
HTMLElement::new_inherited_with_state(IN_ENABLED_STATE,
local_name, prefix, document),
form_owner: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> Root<HTMLFieldSetElement> {
Node::reflect_node(box HTMLFieldSetElement::new_inherited(local_name, prefix, document),
document,
HTMLFieldSetElementBinding::Wrap)
}
}
impl HTMLFieldSetElementMethods for HTMLFieldSetElement {
// https://html.spec.whatwg.org/multipage/#dom-fieldset-elements
fn Elements(&self) -> Root<HTMLCollection> {
#[derive(HeapSizeOf, JSTraceable)]
struct ElementsFilter;
impl CollectionFilter for ElementsFilter {
fn filter<'a>(&self, elem: &'a Element, _root: &'a Node) -> bool {
elem.downcast::<HTMLElement>()
.map_or(false, HTMLElement::is_listed_element)
}
}
let filter = box ElementsFilter;
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(&self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(&window, self.upcast())
}
// https://html.spec.whatwg.org/multipage/#dom-fieldset-disabled
make_bool_getter!(Disabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fieldset-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fae-form
fn GetForm(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner()
}
}
impl VirtualMethods for HTMLFieldSetElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&local_name!("disabled") => {
let disabled_state = match mutation {
AttributeMutation::Set(None) => true,
AttributeMutation::Set(Some(_)) => {
// Fieldset was already disabled before.
return;
},
AttributeMutation::Removed => false,
};
let node = self.upcast::<Node>();
let el = self.upcast::<Element>();
el.set_disabled_state(disabled_state);
el.set_enabled_state(!disabled_state);
let mut found_legend = false;
let children = node.children().filter(|node| {
if found_legend {
true
|
}
});
let fields = children.flat_map(|child| {
child.traverse_preorder().filter(|descendant| {
match descendant.type_id() {
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLButtonElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLTextAreaElement)) => {
true
},
_ => false,
}
})
});
if disabled_state {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.set_disabled_state(true);
el.set_enabled_state(false);
}
} else {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.check_disabled_attribute();
el.check_ancestors_disabled_state_for_form_control();
}
}
},
&local_name!("form") => {
self.form_attribute_mutated(mutation);
},
_ => {},
}
}
}
impl FormControl for HTMLFieldSetElement {
fn form_owner(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner.get()
}
fn set_form_owner(&self, form: Option<&HTMLFormElement>) {
self.form_owner.set(form);
}
fn to_element<'a>(&'a self) -> &'a Element {
self.upcast::<Element>()
}
}
|
} else if node.is::<HTMLLegendElement>() {
found_legend = true;
false
} else {
true
|
random_line_split
|
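The disabled-attribute handler in the htmlfieldsetelement.rs records skips exactly the first legend child before propagating the disabled state, via a stateful filter closure over the children. The same skip-first-match logic, extracted into plain Rust over a Vec (a runnable sketch; the names are illustrative):

// Sketch of the "skip only the first legend" filter from attribute_mutated:
// the closure captures found_legend, so only the first match is dropped.
fn main() {
    let children = vec!["div", "legend", "input", "legend", "select"];
    let mut found_legend = false;
    let affected: Vec<&str> = children
        .into_iter()
        .filter(|child| {
            if found_legend {
                true
            } else if *child == "legend" {
                found_legend = true;
                false // the first legend's contents stay enabled
            } else {
                true
            }
        })
        .collect();
    assert_eq!(affected, vec!["div", "input", "legend", "select"]);
}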
main.rs
|
#![feature(drain_filter)]
#![feature(result_flattening)]
#![feature(trace_macros)]
#![feature(try_blocks)]
#![feature(never_type)]
use std::env;
use std::time::Duration;
use err_derive::Error;
use getopts::Options;
use futures::future;
mod utils;
mod feeds;
mod config;
use config::Config;
mod providers;
use providers::Providers;
mod interfaces;
use interfaces::Interfaces;
mod state;
use state::State;
#[tokio::main]
pub async fn main() -> Result<(), Error> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optopt("c", "config", "Select fallback device to use", "config.json");
opts.optflag("h", "help", "Print this help menu");
let matches = opts.parse(&args[1..])?;
if matches.opt_present("h") {
print_usage(&program, opts);
return Ok(());
}
let config = matches.opt_get("c")?
.unwrap_or("config.json".to_string());
|
let interfaces = Interfaces::new(config.interfaces);
let state = State::new(config.feeds);
let fetch_interval = Duration::from_secs(config.fetch_interval_secs);
future::try_join(providers.fetch_loop(state.clone(), fetch_interval),
interfaces.serve(state.clone())).await?;
Ok(())
}
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options]", program);
print!("{}", opts.usage(&brief));
}
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)] GetoptsError(#[error(source)] getopts::Fail),
#[error(display = "{}", _0)] Infallible(#[error(source)] std::convert::Infallible),
#[error(display = "{}", _0)] ConfigLoadError(#[error(source)] config::LoadError),
#[error(display = "{}", _0)] JoinError(#[error(source)] tokio::task::JoinError),
}
|
println!("Loading config...");
let config = Config::load(config).await?;
println!("Config Loaded");
let mut providers = Providers::new(config.providers);
|
random_line_split
|
main.rs
|
#![feature(drain_filter)]
#![feature(result_flattening)]
#![feature(trace_macros)]
#![feature(try_blocks)]
#![feature(never_type)]
use std::env;
use std::time::Duration;
use err_derive::Error;
use getopts::Options;
use futures::future;
mod utils;
mod feeds;
mod config;
use config::Config;
mod providers;
use providers::Providers;
mod interfaces;
use interfaces::Interfaces;
mod state;
use state::State;
#[tokio::main]
pub async fn
|
() -> Result<(), Error> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optopt("c", "config", "Select fallback device to use", "config.json");
opts.optflag("h", "help", "Print this help menu");
let matches = opts.parse(&args[1..])?;
if matches.opt_present("h") {
print_usage(&program, opts);
return Ok(());
}
let config = matches.opt_get("c")?
.unwrap_or("config.json".to_string());
println!("Loading config...");
let config = Config::load(config).await?;
println!("Config Loaded");
let mut providers = Providers::new(config.providers);
let interfaces = Interfaces::new(config.interfaces);
let state = State::new(config.feeds);
let fetch_interval = Duration::from_secs(config.fetch_interval_secs);
future::try_join(providers.fetch_loop(state.clone(), fetch_interval),
interfaces.serve(state.clone())).await?;
Ok(())
}
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options]", program);
print!("{}", opts.usage(&brief));
}
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)] GetoptsError(#[error(source)] getopts::Fail),
#[error(display = "{}", _0)] Infallible(#[error(source)] std::convert::Infallible),
#[error(display = "{}", _0)] ConfigLoadError(#[error(source)] config::LoadError),
#[error(display = "{}", _0)] JoinError(#[error(source)] tokio::task::JoinError),
}
|
main
|
identifier_name
|
main.rs
|
#![feature(drain_filter)]
#![feature(result_flattening)]
#![feature(trace_macros)]
#![feature(try_blocks)]
#![feature(never_type)]
use std::env;
use std::time::Duration;
use err_derive::Error;
use getopts::Options;
use futures::future;
mod utils;
mod feeds;
mod config;
use config::Config;
mod providers;
use providers::Providers;
mod interfaces;
use interfaces::Interfaces;
mod state;
use state::State;
#[tokio::main]
pub async fn main() -> Result<(), Error> {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let mut opts = Options::new();
opts.optopt("c", "config", "Select fallback device to use", "config.json");
opts.optflag("h", "help", "Print this help menu");
let matches = opts.parse(&args[1..])?;
if matches.opt_present("h")
|
let config = matches.opt_get("c")?
.unwrap_or("config.json".to_string());
println!("Loading config...");
let config = Config::load(config).await?;
println!("Config Loaded");
let mut providers = Providers::new(config.providers);
let interfaces = Interfaces::new(config.interfaces);
let state = State::new(config.feeds);
let fetch_interval = Duration::from_secs(config.fetch_interval_secs);
future::try_join(providers.fetch_loop(state.clone(), fetch_interval),
interfaces.serve(state.clone())).await?;
Ok(())
}
fn print_usage(program: &str, opts: Options) {
let brief = format!("Usage: {} [options]", program);
print!("{}", opts.usage(&brief));
}
#[derive(Debug, Error)]
pub enum Error {
#[error(display = "{}", _0)] GetoptsError(#[error(source)] getopts::Fail),
#[error(display = "{}", _0)] Infallible(#[error(source)] std::convert::Infallible),
#[error(display = "{}", _0)] ConfigLoadError(#[error(source)] config::LoadError),
#[error(display = "{}", _0)] JoinError(#[error(source)] tokio::task::JoinError),
}
|
{
print_usage(&program, opts);
return Ok(());
}
|
conditional_block
|
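The main.rs records center on a standard getopts flow: declare options, parse args[1..], honor -h, then fall back to a default config path. A condensed sketch of just that prologue, assuming the getopts crate as in the original, with error handling shortened to expect:

// Sketch of the option-parsing prologue, assuming getopts = "0.2".
use getopts::Options;
use std::env;

fn main() {
    let args: Vec<String> = env::args().collect();
    let mut opts = Options::new();
    opts.optopt("c", "config", "Select the config file to use", "config.json");
    opts.optflag("h", "help", "Print this help menu");
    let matches = opts.parse(&args[1..]).expect("invalid arguments");
    if matches.opt_present("h") {
        print!("{}", opts.usage(&format!("Usage: {} [options]", args[0])));
        return;
    }
    // opt_get parses the value; an absent -c falls back to the default path.
    let config: String = matches
        .opt_get("c")
        .expect("invalid -c value")
        .unwrap_or_else(|| "config.json".to_string());
    println!("using config at {}", config);
}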
liquid.rs
|
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
pub static FIXTURES: &[&str] = &["Hello World"];
fn bench_fixtures(c: &mut Criterion) {
let mut group = c.benchmark_group("liquid_bench_fixtures");
for fixture in FIXTURES {
group.bench_function(BenchmarkId::new("parse", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
b.iter(|| parser.parse(fixture));
});
|
let template = parser
.parse(fixture)
.expect("Benchmark template parsing failed");
let data = liquid::Object::new();
template.render(&data).unwrap();
b.iter(|| template.render(&data));
});
}
group.finish();
}
criterion_group!(benches, bench_fixtures);
criterion_main!(benches);
|
group.bench_function(BenchmarkId::new("render", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
|
random_line_split
|
liquid.rs
|
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
pub static FIXTURES: &[&str] = &["Hello World"];
fn bench_fixtures(c: &mut Criterion)
|
}
criterion_group!(benches, bench_fixtures);
criterion_main!(benches);
|
{
let mut group = c.benchmark_group("liquid_bench_fixtures");
for fixture in FIXTURES {
group.bench_function(BenchmarkId::new("parse", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
b.iter(|| parser.parse(fixture));
});
group.bench_function(BenchmarkId::new("render", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
let template = parser
.parse(fixture)
.expect("Benchmark template parsing failed");
let data = liquid::Object::new();
template.render(&data).unwrap();
b.iter(|| template.render(&data));
});
}
group.finish();
|
identifier_body
|
liquid.rs
|
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
pub static FIXTURES: &[&str] = &["Hello World"];
fn
|
(c: &mut Criterion) {
let mut group = c.benchmark_group("liquid_bench_fixtures");
for fixture in FIXTURES {
group.bench_function(BenchmarkId::new("parse", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
b.iter(|| parser.parse(fixture));
});
group.bench_function(BenchmarkId::new("render", fixture), |b| {
let parser = liquid::ParserBuilder::with_stdlib().build().unwrap();
let template = parser
.parse(fixture)
.expect("Benchmark template parsing failed");
let data = liquid::Object::new();
template.render(&data).unwrap();
b.iter(|| template.render(&data));
});
}
group.finish();
}
criterion_group!(benches, bench_fixtures);
criterion_main!(benches);
|
bench_fixtures
|
identifier_name
|
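The liquid.rs records show the usual criterion discipline: the parse benchmark times parsing inside b.iter, while the render benchmark parses once up front so only rendering is measured. A minimal skeleton of that hoist-the-setup shape (a sketch; the workload is a hypothetical stand-in for parse/render):

// Sketch: keep setup outside b.iter so criterion times only the hot path,
// mirroring how the render benchmark parses the template once up front.
use criterion::{criterion_group, criterion_main, Criterion};

fn expensive_setup() -> Vec<u64> {
    (0..1_000).collect() // stand-in for ParserBuilder::build + parse
}

fn bench(c: &mut Criterion) {
    let data = expensive_setup(); // hoisted: not measured
    c.bench_function("sum", |b| {
        b.iter(|| data.iter().sum::<u64>()) // only this closure is timed
    });
}

criterion_group!(benches, bench);
criterion_main!(benches);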
io.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Snapshot i/o.
//! Ways of writing and reading snapshots. This module supports writing and reading
//! snapshots of two different formats: packed and loose.
//! Packed snapshots are written to a single file, and loose snapshots are
//! written to multiple files in one directory.
use std::collections::HashMap;
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp, Stream, View};
use super::ManifestData;
/// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised.
pub trait SnapshotWriter {
/// Write a compressed state chunk.
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Write a compressed block chunk.
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Complete writing. The manifest's chunk lists must be consistent
/// with the chunks written.
fn finish(self, manifest: ManifestData) -> io::Result<()> where Self: Sized;
}
// (hash, len, offset)
struct ChunkInfo(H256, u64, u64);
impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}
impl rlp::Decodable for ChunkInfo {
fn decode<D: rlp::Decoder>(decoder: &D) -> Result<Self, rlp::DecoderError> {
let d = decoder.as_rlp();
let hash = try!(d.val_at(0));
let len = try!(d.val_at(1));
let off = try!(d.val_at(2));
Ok(ChunkInfo(hash, len, off))
}
}
/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:
/// [Concatenated chunk data]
/// [manifest as RLP]
/// [manifest start offset (8 bytes little-endian)]
///
/// The manifest contains all the same information as a standard `ManifestData`,
/// but also maps chunk hashes to their lengths and offsets in the file
/// for easy reading.
pub struct PackedWriter {
file: File,
state_hashes: Vec<ChunkInfo>,
block_hashes: Vec<ChunkInfo>,
cur_len: u64,
}
impl PackedWriter {
/// Create a new "PackedWriter", to write into the file at the given path.
pub fn new(path: &Path) -> io::Result<Self> {
Ok(PackedWriter {
file: try!(File::create(path)),
state_hashes: Vec::new(),
block_hashes: Vec::new(),
cur_len: 0,
})
}
}
impl SnapshotWriter for PackedWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.state_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.block_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours.
let mut stream = RlpStream::new_list(5);
stream
.append(&self.state_hashes)
.append(&self.block_hashes)
.append(&manifest.state_root)
.append(&manifest.block_number)
.append(&manifest.block_hash);
let manifest_rlp = stream.out();
try!(self.file.write_all(&manifest_rlp));
let off = self.cur_len;
trace!(target: "snapshot_io", "writing manifest of len {} to offset {}", manifest_rlp.len(), off);
let off_bytes: [u8; 8] =
[
off as u8,
(off >> 8) as u8,
(off >> 16) as u8,
(off >> 24) as u8,
(off >> 32) as u8,
(off >> 40) as u8,
(off >> 48) as u8,
(off >> 56) as u8,
];
try!(self.file.write_all(&off_bytes[..]));
Ok(())
}
}
/// A "loose" writer writes chunk files into a directory.
pub struct LooseWriter {
dir: PathBuf,
}
impl LooseWriter {
/// Create a new `LooseWriter` which will write into the given directory,
/// creating it if it doesn't exist.
pub fn new(path: PathBuf) -> io::Result<Self> {
try!(fs::create_dir_all(&path));
Ok(LooseWriter {
dir: path,
})
}
// writing logic is the same for both kinds of chunks.
fn write_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
let mut file_path = self.dir.clone();
file_path.push(hash.hex());
let mut file = try!(File::create(file_path));
try!(file.write_all(chunk));
Ok(())
}
}
impl SnapshotWriter for LooseWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn finish(self, manifest: ManifestData) -> io::Result<()> {
let rlp = manifest.into_rlp();
let mut path = self.dir.clone();
path.push("MANIFEST");
let mut file = try!(File::create(path));
try!(file.write_all(&rlp[..]));
Ok(())
}
}
/// Something which can read compressed snapshots.
pub trait SnapshotReader {
/// Get the manifest data for this snapshot.
fn manifest(&self) -> &ManifestData;
/// Get raw chunk data by hash. Behavior is implementation-defined
/// if a chunk not in the manifest is requested.
fn chunk(&self, hash: H256) -> io::Result<Bytes>;
}
/// Packed snapshot reader.
pub struct
|
{
file: File,
state_hashes: HashMap<H256, (u64, u64)>, // len, offset
block_hashes: HashMap<H256, (u64, u64)>, // len, offset
manifest: ManifestData,
}
impl PackedReader {
/// Create a new `PackedReader` for the file at the given path.
/// This will fail if any io errors are encountered or the file
/// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> {
let mut file = try!(File::open(path));
let file_len = try!(file.metadata()).len();
if file_len < 8 {
// ensure we don't seek before beginning.
return Ok(None);
}
try!(file.seek(SeekFrom::End(-8)));
let mut off_bytes = [0u8; 8];
try!(file.read_exact(&mut off_bytes[..]));
let manifest_off: u64 =
((off_bytes[7] as u64) << 56) +
((off_bytes[6] as u64) << 48) +
((off_bytes[5] as u64) << 40) +
((off_bytes[4] as u64) << 32) +
((off_bytes[3] as u64) << 24) +
((off_bytes[2] as u64) << 16) +
((off_bytes[1] as u64) << 8) +
(off_bytes[0] as u64);
let manifest_len = file_len - manifest_off - 8;
trace!(target: "snapshot", "loading manifest of length {} from offset {}", manifest_len, manifest_off);
let mut manifest_buf = vec![0; manifest_len as usize];
try!(file.seek(SeekFrom::Start(manifest_off)));
try!(file.read_exact(&mut manifest_buf));
let rlp = UntrustedRlp::new(&manifest_buf);
let state: Vec<ChunkInfo> = try!(rlp.val_at(0));
let blocks: Vec<ChunkInfo> = try!(rlp.val_at(1));
let manifest = ManifestData {
state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: try!(rlp.val_at(2)),
block_number: try!(rlp.val_at(3)),
block_hash: try!(rlp.val_at(4)),
};
Ok(Some(PackedReader {
file: file,
state_hashes: state.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
block_hashes: blocks.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
manifest: manifest
}))
}
}
impl SnapshotReader for PackedReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let &(len, off) = self.state_hashes.get(&hash).or_else(|| self.block_hashes.get(&hash))
.expect("only chunks in the manifest can be requested; qed");
let mut file = &self.file;
try!(file.seek(SeekFrom::Start(off)));
let mut buf = vec![0; len as usize];
try!(file.read_exact(&mut buf[..]));
Ok(buf)
}
}
/// reader for "loose" snapshots
pub struct LooseReader {
dir: PathBuf,
manifest: ManifestData,
}
impl LooseReader {
/// Create a new `LooseReader` which will read the manifest and chunk data from
/// the given directory.
pub fn new(mut dir: PathBuf) -> Result<Self, ::error::Error> {
let mut manifest_buf = Vec::new();
dir.push("MANIFEST");
let mut manifest_file = try!(File::open(&dir));
try!(manifest_file.read_to_end(&mut manifest_buf));
let manifest = try!(ManifestData::from_rlp(&manifest_buf[..]));
dir.pop();
Ok(LooseReader {
dir: dir,
manifest: manifest,
})
}
}
impl SnapshotReader for LooseReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let mut path = self.dir.clone();
path.push(hash.hex());
let mut buf = Vec::new();
let mut file = try!(File::open(&path));
try!(file.read_to_end(&mut buf));
Ok(buf)
}
}
#[cfg(test)]
mod tests {
use devtools::RandomTempPath;
use util::sha3::Hashable;
use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};
const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
#[test]
fn packed_write_and_read() {
let path = RandomTempPath::new();
let mut writer = PackedWriter::new(path.as_path()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = PackedReader::new(path.as_path()).unwrap().unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
#[test]
fn loose_write_and_read() {
let path = RandomTempPath::new();
let mut writer = LooseWriter::new(path.as_path().into()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = LooseReader::new(path.as_path().into()).unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
}
|
PackedReader
|
identifier_name
|
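PackedReader::new in the record above recovers the manifest by reading the 8-byte footer first: seek to EOF-8, decode the offset, then seek back and read the manifest bytes. A condensed modern-Rust sketch of the same steps, using ? and from_le_bytes in place of try! and the shift chain (read_manifest_bytes is a hypothetical helper, not part of the original API):

use std::fs::File;
use std::io::{self, Read, Seek, SeekFrom};

// Sketch of PackedReader::new's footer dance.
fn read_manifest_bytes(path: &std::path::Path) -> io::Result<Option<Vec<u8>>> {
    let mut file = File::open(path)?;
    let file_len = file.metadata()?.len();
    if file_len < 8 {
        return Ok(None); // too short to even hold the offset footer
    }
    file.seek(SeekFrom::End(-8))?;
    let mut off_bytes = [0u8; 8];
    file.read_exact(&mut off_bytes)?;
    let manifest_off = u64::from_le_bytes(off_bytes);
    let manifest_len = file_len - manifest_off - 8;
    let mut buf = vec![0u8; manifest_len as usize];
    file.seek(SeekFrom::Start(manifest_off))?;
    file.read_exact(&mut buf)?;
    Ok(Some(buf)) // caller decodes this as the RLP manifest
}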
io.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Snapshot i/o.
//! Ways of writing and reading snapshots. This module supports writing and reading
//! snapshots of two different formats: packed and loose.
//! Packed snapshots are written to a single file, and loose snapshots are
//! written to multiple files in one directory.
use std::collections::HashMap;
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp, Stream, View};
use super::ManifestData;
/// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised.
pub trait SnapshotWriter {
/// Write a compressed state chunk.
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Write a compressed block chunk.
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Complete writing. The manifest's chunk lists must be consistent
/// with the chunks written.
fn finish(self, manifest: ManifestData) -> io::Result<()> where Self: Sized;
}
// (hash, len, offset)
struct ChunkInfo(H256, u64, u64);
impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}
impl rlp::Decodable for ChunkInfo {
fn decode<D: rlp::Decoder>(decoder: &D) -> Result<Self, rlp::DecoderError> {
let d = decoder.as_rlp();
let hash = try!(d.val_at(0));
let len = try!(d.val_at(1));
let off = try!(d.val_at(2));
Ok(ChunkInfo(hash, len, off))
}
}
/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:
/// [Concatenated chunk data]
/// [manifest as RLP]
/// [manifest start offset (8 bytes little-endian)]
///
/// The manifest contains all the same information as a standard `ManifestData`,
/// but also maps chunk hashes to their lengths and offsets in the file
/// for easy reading.
pub struct PackedWriter {
file: File,
state_hashes: Vec<ChunkInfo>,
block_hashes: Vec<ChunkInfo>,
cur_len: u64,
}
impl PackedWriter {
/// Create a new "PackedWriter", to write into the file at the given path.
pub fn new(path: &Path) -> io::Result<Self> {
Ok(PackedWriter {
file: try!(File::create(path)),
state_hashes: Vec::new(),
block_hashes: Vec::new(),
cur_len: 0,
})
}
}
impl SnapshotWriter for PackedWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.state_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.block_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours.
let mut stream = RlpStream::new_list(5);
stream
.append(&self.state_hashes)
.append(&self.block_hashes)
.append(&manifest.state_root)
.append(&manifest.block_number)
.append(&manifest.block_hash);
let manifest_rlp = stream.out();
try!(self.file.write_all(&manifest_rlp));
let off = self.cur_len;
trace!(target: "snapshot_io", "writing manifest of len {} to offset {}", manifest_rlp.len(), off);
let off_bytes: [u8; 8] =
[
off as u8,
(off >> 8) as u8,
(off >> 16) as u8,
(off >> 24) as u8,
(off >> 32) as u8,
(off >> 40) as u8,
(off >> 48) as u8,
(off >> 56) as u8,
];
try!(self.file.write_all(&off_bytes[..]));
Ok(())
}
}
/// A "loose" writer writes chunk files into a directory.
pub struct LooseWriter {
dir: PathBuf,
}
impl LooseWriter {
/// Create a new `LooseWriter` which will write into the given directory,
/// creating it if it doesn't exist.
pub fn new(path: PathBuf) -> io::Result<Self> {
try!(fs::create_dir_all(&path));
Ok(LooseWriter {
dir: path,
})
}
// writing logic is the same for both kinds of chunks.
fn write_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
let mut file_path = self.dir.clone();
file_path.push(hash.hex());
let mut file = try!(File::create(file_path));
try!(file.write_all(chunk));
Ok(())
}
}
impl SnapshotWriter for LooseWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn finish(self, manifest: ManifestData) -> io::Result<()> {
let rlp = manifest.into_rlp();
let mut path = self.dir.clone();
path.push("MANIFEST");
let mut file = try!(File::create(path));
try!(file.write_all(&rlp[..]));
Ok(())
}
}
/// Something which can read compressed snapshots.
pub trait SnapshotReader {
/// Get the manifest data for this snapshot.
fn manifest(&self) -> &ManifestData;
/// Get raw chunk data by hash. Behavior is implementation-defined
/// if a chunk not in the manifest is requested.
fn chunk(&self, hash: H256) -> io::Result<Bytes>;
}
/// Packed snapshot reader.
pub struct PackedReader {
file: File,
state_hashes: HashMap<H256, (u64, u64)>, // len, offset
block_hashes: HashMap<H256, (u64, u64)>, // len, offset
manifest: ManifestData,
}
impl PackedReader {
/// Create a new `PackedReader` for the file at the given path.
/// This will fail if any io errors are encountered or the file
/// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> {
let mut file = try!(File::open(path));
let file_len = try!(file.metadata()).len();
if file_len < 8 {
// ensure we don't seek before beginning.
return Ok(None);
}
try!(file.seek(SeekFrom::End(-8)));
let mut off_bytes = [0u8; 8];
try!(file.read_exact(&mut off_bytes[..]));
let manifest_off: u64 =
((off_bytes[7] as u64) << 56) +
((off_bytes[6] as u64) << 48) +
((off_bytes[5] as u64) << 40) +
((off_bytes[4] as u64) << 32) +
((off_bytes[3] as u64) << 24) +
((off_bytes[2] as u64) << 16) +
((off_bytes[1] as u64) << 8) +
(off_bytes[0] as u64);
let manifest_len = file_len - manifest_off - 8;
trace!(target: "snapshot", "loading manifest of length {} from offset {}", manifest_len, manifest_off);
let mut manifest_buf = vec![0; manifest_len as usize];
try!(file.seek(SeekFrom::Start(manifest_off)));
try!(file.read_exact(&mut manifest_buf));
let rlp = UntrustedRlp::new(&manifest_buf);
let state: Vec<ChunkInfo> = try!(rlp.val_at(0));
let blocks: Vec<ChunkInfo> = try!(rlp.val_at(1));
let manifest = ManifestData {
state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: try!(rlp.val_at(2)),
block_number: try!(rlp.val_at(3)),
block_hash: try!(rlp.val_at(4)),
};
Ok(Some(PackedReader {
file: file,
state_hashes: state.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
block_hashes: blocks.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
manifest: manifest
}))
}
}
impl SnapshotReader for PackedReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let &(len, off) = self.state_hashes.get(&hash).or_else(|| self.block_hashes.get(&hash))
.expect("only chunks in the manifest can be requested; qed");
let mut file = &self.file;
try!(file.seek(SeekFrom::Start(off)));
let mut buf = vec![0; len as usize];
try!(file.read_exact(&mut buf[..]));
Ok(buf)
}
}
/// reader for "loose" snapshots
pub struct LooseReader {
dir: PathBuf,
manifest: ManifestData,
}
impl LooseReader {
/// Create a new `LooseReader` which will read the manifest and chunk data from
/// the given directory.
pub fn new(mut dir: PathBuf) -> Result<Self, ::error::Error> {
let mut manifest_buf = Vec::new();
dir.push("MANIFEST");
let mut manifest_file = try!(File::open(&dir));
try!(manifest_file.read_to_end(&mut manifest_buf));
let manifest = try!(ManifestData::from_rlp(&manifest_buf[..]));
dir.pop();
Ok(LooseReader {
dir: dir,
manifest: manifest,
})
}
}
impl SnapshotReader for LooseReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let mut path = self.dir.clone();
path.push(hash.hex());
let mut buf = Vec::new();
let mut file = try!(File::open(&path));
try!(file.read_to_end(&mut buf));
Ok(buf)
}
}
#[cfg(test)]
mod tests {
use devtools::RandomTempPath;
use util::sha3::Hashable;
use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};
const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
#[test]
fn packed_write_and_read()
|
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = PackedReader::new(path.as_path()).unwrap().unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
#[test]
fn loose_write_and_read() {
let path = RandomTempPath::new();
let mut writer = LooseWriter::new(path.as_path().into()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = LooseReader::new(path.as_path().into()).unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
}
|
{
let path = RandomTempPath::new();
let mut writer = PackedWriter::new(path.as_path()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
|
identifier_body
|
io.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Snapshot i/o.
//! Ways of writing and reading snapshots. This module supports writing and reading
//! snapshots of two different formats: packed and loose.
//! Packed snapshots are written to a single file, and loose snapshots are
//! written to multiple files in one directory.
use std::collections::HashMap;
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp, Stream, View};
use super::ManifestData;
/// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised.
pub trait SnapshotWriter {
/// Write a compressed state chunk.
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Write a compressed block chunk.
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Complete writing. The manifest's chunk lists must be consistent
/// with the chunks written.
fn finish(self, manifest: ManifestData) -> io::Result<()> where Self: Sized;
}
// (hash, len, offset)
struct ChunkInfo(H256, u64, u64);
impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}
impl rlp::Decodable for ChunkInfo {
fn decode<D: rlp::Decoder>(decoder: &D) -> Result<Self, rlp::DecoderError> {
let d = decoder.as_rlp();
let hash = try!(d.val_at(0));
let len = try!(d.val_at(1));
let off = try!(d.val_at(2));
Ok(ChunkInfo(hash, len, off))
}
}
/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:
/// [Concatenated chunk data]
/// [manifest as RLP]
/// [manifest start offset (8 bytes little-endian)]
///
/// The manifest contains all the same information as a standard `ManifestData`,
/// but also maps chunk hashes to their lengths and offsets in the file
/// for easy reading.
pub struct PackedWriter {
file: File,
state_hashes: Vec<ChunkInfo>,
block_hashes: Vec<ChunkInfo>,
cur_len: u64,
}
impl PackedWriter {
/// Create a new "PackedWriter", to write into the file at the given path.
pub fn new(path: &Path) -> io::Result<Self> {
Ok(PackedWriter {
file: try!(File::create(path)),
state_hashes: Vec::new(),
block_hashes: Vec::new(),
cur_len: 0,
})
}
}
impl SnapshotWriter for PackedWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.state_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.block_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours.
let mut stream = RlpStream::new_list(5);
stream
.append(&self.state_hashes)
.append(&self.block_hashes)
.append(&manifest.state_root)
.append(&manifest.block_number)
.append(&manifest.block_hash);
let manifest_rlp = stream.out();
try!(self.file.write_all(&manifest_rlp));
let off = self.cur_len;
trace!(target: "snapshot_io", "writing manifest of len {} to offset {}", manifest_rlp.len(), off);
let off_bytes: [u8; 8] =
[
off as u8,
(off >> 8) as u8,
(off >> 16) as u8,
(off >> 24) as u8,
(off >> 32) as u8,
(off >> 40) as u8,
(off >> 48) as u8,
(off >> 56) as u8,
];
try!(self.file.write_all(&off_bytes[..]));
Ok(())
}
}
/// A "loose" writer writes chunk files into a directory.
pub struct LooseWriter {
dir: PathBuf,
}
impl LooseWriter {
/// Create a new `LooseWriter` which will write into the given directory,
/// creating it if it doesn't exist.
pub fn new(path: PathBuf) -> io::Result<Self> {
try!(fs::create_dir_all(&path));
Ok(LooseWriter {
dir: path,
})
}
// writing logic is the same for both kinds of chunks.
fn write_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
let mut file_path = self.dir.clone();
file_path.push(hash.hex());
let mut file = try!(File::create(file_path));
try!(file.write_all(chunk));
Ok(())
}
}
impl SnapshotWriter for LooseWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn finish(self, manifest: ManifestData) -> io::Result<()> {
let rlp = manifest.into_rlp();
let mut path = self.dir.clone();
path.push("MANIFEST");
let mut file = try!(File::create(path));
try!(file.write_all(&rlp[..]));
Ok(())
}
}
/// Something which can read compressed snapshots.
pub trait SnapshotReader {
/// Get the manifest data for this snapshot.
fn manifest(&self) -> &ManifestData;
/// Get raw chunk data by hash. Behavior is implementation-defined
/// if a chunk not in the manifest is requested.
fn chunk(&self, hash: H256) -> io::Result<Bytes>;
}
/// Packed snapshot reader.
pub struct PackedReader {
file: File,
state_hashes: HashMap<H256, (u64, u64)>, // len, offset
block_hashes: HashMap<H256, (u64, u64)>, // len, offset
manifest: ManifestData,
}
impl PackedReader {
/// Create a new `PackedReader` for the file at the given path.
/// This will fail if any io errors are encountered or the file
/// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> {
let mut file = try!(File::open(path));
let file_len = try!(file.metadata()).len();
if file_len < 8
|
try!(file.seek(SeekFrom::End(-8)));
let mut off_bytes = [0u8; 8];
try!(file.read_exact(&mut off_bytes[..]));
let manifest_off: u64 =
((off_bytes[7] as u64) << 56) +
((off_bytes[6] as u64) << 48) +
((off_bytes[5] as u64) << 40) +
((off_bytes[4] as u64) << 32) +
((off_bytes[3] as u64) << 24) +
((off_bytes[2] as u64) << 16) +
((off_bytes[1] as u64) << 8) +
(off_bytes[0] as u64);
let manifest_len = file_len - manifest_off - 8;
trace!(target: "snapshot", "loading manifest of length {} from offset {}", manifest_len, manifest_off);
let mut manifest_buf = vec![0; manifest_len as usize];
try!(file.seek(SeekFrom::Start(manifest_off)));
try!(file.read_exact(&mut manifest_buf));
let rlp = UntrustedRlp::new(&manifest_buf);
let state: Vec<ChunkInfo> = try!(rlp.val_at(0));
let blocks: Vec<ChunkInfo> = try!(rlp.val_at(1));
let manifest = ManifestData {
state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: try!(rlp.val_at(2)),
block_number: try!(rlp.val_at(3)),
block_hash: try!(rlp.val_at(4)),
};
Ok(Some(PackedReader {
file: file,
state_hashes: state.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
block_hashes: blocks.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
manifest: manifest
}))
}
}
impl SnapshotReader for PackedReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let &(len, off) = self.state_hashes.get(&hash).or_else(|| self.block_hashes.get(&hash))
.expect("only chunks in the manifest can be requested; qed");
let mut file = &self.file;
try!(file.seek(SeekFrom::Start(off)));
let mut buf = vec![0; len as usize];
try!(file.read_exact(&mut buf[..]));
Ok(buf)
}
}
/// reader for "loose" snapshots
pub struct LooseReader {
dir: PathBuf,
manifest: ManifestData,
}
impl LooseReader {
/// Create a new `LooseReader` which will read the manifest and chunk data from
/// the given directory.
pub fn new(mut dir: PathBuf) -> Result<Self, ::error::Error> {
let mut manifest_buf = Vec::new();
dir.push("MANIFEST");
let mut manifest_file = try!(File::open(&dir));
try!(manifest_file.read_to_end(&mut manifest_buf));
let manifest = try!(ManifestData::from_rlp(&manifest_buf[..]));
dir.pop();
Ok(LooseReader {
dir: dir,
manifest: manifest,
})
}
}
impl SnapshotReader for LooseReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let mut path = self.dir.clone();
path.push(hash.hex());
let mut buf = Vec::new();
let mut file = try!(File::open(&path));
try!(file.read_to_end(&mut buf));
Ok(buf)
}
}
#[cfg(test)]
mod tests {
use devtools::RandomTempPath;
use util::sha3::Hashable;
use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};
const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
#[test]
fn packed_write_and_read() {
let path = RandomTempPath::new();
let mut writer = PackedWriter::new(path.as_path()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = PackedReader::new(path.as_path()).unwrap().unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
#[test]
fn loose_write_and_read() {
let path = RandomTempPath::new();
let mut writer = LooseWriter::new(path.as_path().into()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = LooseReader::new(path.as_path().into()).unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
}
|
{
// ensure we don't seek before beginning.
return Ok(None);
}
|
conditional_block
|
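The io.rs records encode and decode the trailing manifest offset by hand with shifts. On modern Rust the same little-endian round trip is u64::to_le_bytes / from_le_bytes; a sketch checking that the manual form from PackedWriter::finish matches the std helpers:

// Sketch: the shift-based little-endian encoding of the manifest offset
// is equivalent to u64::to_le_bytes / from_le_bytes.
fn encode_manual(off: u64) -> [u8; 8] {
    [
        off as u8,
        (off >> 8) as u8,
        (off >> 16) as u8,
        (off >> 24) as u8,
        (off >> 32) as u8,
        (off >> 40) as u8,
        (off >> 48) as u8,
        (off >> 56) as u8,
    ]
}

fn main() {
    let off: u64 = 0x0123_4567_89ab_cdef;
    assert_eq!(encode_manual(off), off.to_le_bytes());
    assert_eq!(u64::from_le_bytes(encode_manual(off)), off);
}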
io.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Snapshot i/o.
//! Ways of writing and reading snapshots. This module supports writing and reading
//! snapshots of two different formats: packed and loose.
//! Packed snapshots are written to a single file, and loose snapshots are
//! written to multiple files in one directory.
use std::collections::HashMap;
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp, Stream, View};
use super::ManifestData;
/// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised.
pub trait SnapshotWriter {
/// Write a compressed state chunk.
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Write a compressed block chunk.
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()>;
/// Complete writing. The manifest's chunk lists must be consistent
/// with the chunks written.
fn finish(self, manifest: ManifestData) -> io::Result<()> where Self: Sized;
}
// (hash, len, offset)
struct ChunkInfo(H256, u64, u64);
impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}
impl rlp::Decodable for ChunkInfo {
fn decode<D: rlp::Decoder>(decoder: &D) -> Result<Self, rlp::DecoderError> {
let d = decoder.as_rlp();
let hash = try!(d.val_at(0));
let len = try!(d.val_at(1));
let off = try!(d.val_at(2));
Ok(ChunkInfo(hash, len, off))
}
}
/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:
/// [Concatenated chunk data]
/// [manifest as RLP]
/// [manifest start offset (8 bytes little-endian)]
///
/// The manifest contains all the same information as a standard `ManifestData`,
/// but also maps chunk hashes to their lengths and offsets in the file
/// for easy reading.
pub struct PackedWriter {
file: File,
state_hashes: Vec<ChunkInfo>,
block_hashes: Vec<ChunkInfo>,
cur_len: u64,
}
impl PackedWriter {
/// Create a new "PackedWriter", to write into the file at the given path.
pub fn new(path: &Path) -> io::Result<Self> {
Ok(PackedWriter {
file: try!(File::create(path)),
state_hashes: Vec::new(),
block_hashes: Vec::new(),
cur_len: 0,
})
}
}
impl SnapshotWriter for PackedWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.state_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
try!(self.file.write_all(chunk));
let len = chunk.len() as u64;
self.block_hashes.push(ChunkInfo(hash, len, self.cur_len));
self.cur_len += len;
Ok(())
}
fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours.
let mut stream = RlpStream::new_list(5);
stream
.append(&self.state_hashes)
.append(&self.block_hashes)
.append(&manifest.state_root)
.append(&manifest.block_number)
.append(&manifest.block_hash);
let manifest_rlp = stream.out();
try!(self.file.write_all(&manifest_rlp));
let off = self.cur_len;
trace!(target: "snapshot_io", "writing manifest of len {} to offset {}", manifest_rlp.len(), off);
let off_bytes: [u8; 8] =
[
off as u8,
(off >> 8) as u8,
(off >> 16) as u8,
(off >> 24) as u8,
(off >> 32) as u8,
(off >> 40) as u8,
(off >> 48) as u8,
(off >> 56) as u8,
];
try!(self.file.write_all(&off_bytes[..]));
Ok(())
}
}
/// A "loose" writer writes chunk files into a directory.
pub struct LooseWriter {
dir: PathBuf,
}
impl LooseWriter {
/// Create a new `LooseWriter` which will write into the given directory,
/// creating it if it doesn't exist.
pub fn new(path: PathBuf) -> io::Result<Self> {
try!(fs::create_dir_all(&path));
|
// writing logic is the same for both kinds of chunks.
fn write_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
let mut file_path = self.dir.clone();
file_path.push(hash.hex());
let mut file = try!(File::create(file_path));
try!(file.write_all(chunk));
Ok(())
}
}
impl SnapshotWriter for LooseWriter {
fn write_state_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn write_block_chunk(&mut self, hash: H256, chunk: &[u8]) -> io::Result<()> {
self.write_chunk(hash, chunk)
}
fn finish(self, manifest: ManifestData) -> io::Result<()> {
let rlp = manifest.into_rlp();
let mut path = self.dir.clone();
path.push("MANIFEST");
let mut file = try!(File::create(path));
try!(file.write_all(&rlp[..]));
Ok(())
}
}
/// Something which can read compressed snapshots.
pub trait SnapshotReader {
/// Get the manifest data for this snapshot.
fn manifest(&self) -> &ManifestData;
/// Get raw chunk data by hash. Implementation-defined behavior
/// if a chunk not in the manifest is requested.
fn chunk(&self, hash: H256) -> io::Result<Bytes>;
}
/// Packed snapshot reader.
pub struct PackedReader {
file: File,
state_hashes: HashMap<H256, (u64, u64)>, // len, offset
block_hashes: HashMap<H256, (u64, u64)>, // len, offset
manifest: ManifestData,
}
impl PackedReader {
/// Create a new `PackedReader` for the file at the given path.
/// This will fail if any I/O errors are encountered or the file
/// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> {
let mut file = try!(File::open(path));
let file_len = try!(file.metadata()).len();
if file_len < 8 {
// ensure we don't seek before beginning.
return Ok(None);
}
try!(file.seek(SeekFrom::End(-8)));
let mut off_bytes = [0u8; 8];
try!(file.read_exact(&mut off_bytes[..]));
let manifest_off: u64 =
((off_bytes[7] as u64) << 56) +
((off_bytes[6] as u64) << 48) +
((off_bytes[5] as u64) << 40) +
((off_bytes[4] as u64) << 32) +
((off_bytes[3] as u64) << 24) +
((off_bytes[2] as u64) << 16) +
((off_bytes[1] as u64) << 8) +
(off_bytes[0] as u64);
let manifest_len = file_len - manifest_off - 8;
trace!(target: "snapshot", "loading manifest of length {} from offset {}", manifest_len, manifest_off);
let mut manifest_buf = vec![0; manifest_len as usize];
try!(file.seek(SeekFrom::Start(manifest_off)));
try!(file.read_exact(&mut manifest_buf));
let rlp = UntrustedRlp::new(&manifest_buf);
let state: Vec<ChunkInfo> = try!(rlp.val_at(0));
let blocks: Vec<ChunkInfo> = try!(rlp.val_at(1));
let manifest = ManifestData {
state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: try!(rlp.val_at(2)),
block_number: try!(rlp.val_at(3)),
block_hash: try!(rlp.val_at(4)),
};
Ok(Some(PackedReader {
file: file,
state_hashes: state.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
block_hashes: blocks.into_iter().map(|c| (c.0, (c.1, c.2))).collect(),
manifest: manifest
}))
}
}
impl SnapshotReader for PackedReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let &(len, off) = self.state_hashes.get(&hash).or_else(|| self.block_hashes.get(&hash))
.expect("only chunks in the manifest can be requested; qed");
let mut file = &self.file;
try!(file.seek(SeekFrom::Start(off)));
let mut buf = vec![0; len as usize];
try!(file.read_exact(&mut buf[..]));
Ok(buf)
}
}
/// reader for "loose" snapshots
pub struct LooseReader {
dir: PathBuf,
manifest: ManifestData,
}
impl LooseReader {
/// Create a new `LooseReader` which will read the manifest and chunk data from
/// the given directory.
pub fn new(mut dir: PathBuf) -> Result<Self, ::error::Error> {
let mut manifest_buf = Vec::new();
dir.push("MANIFEST");
let mut manifest_file = try!(File::open(&dir));
try!(manifest_file.read_to_end(&mut manifest_buf));
let manifest = try!(ManifestData::from_rlp(&manifest_buf[..]));
dir.pop();
Ok(LooseReader {
dir: dir,
manifest: manifest,
})
}
}
impl SnapshotReader for LooseReader {
fn manifest(&self) -> &ManifestData {
&self.manifest
}
fn chunk(&self, hash: H256) -> io::Result<Bytes> {
let mut path = self.dir.clone();
path.push(hash.hex());
let mut buf = Vec::new();
let mut file = try!(File::open(&path));
try!(file.read_to_end(&mut buf));
Ok(buf)
}
}
#[cfg(test)]
mod tests {
use devtools::RandomTempPath;
use util::sha3::Hashable;
use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};
const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
#[test]
fn packed_write_and_read() {
let path = RandomTempPath::new();
let mut writer = PackedWriter::new(path.as_path()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(hash, chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = PackedReader::new(path.as_path()).unwrap().unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
#[test]
fn loose_write_and_read() {
let path = RandomTempPath::new();
let mut writer = LooseWriter::new(path.as_path().into()).unwrap();
let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();
for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}
for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(hash, chunk).unwrap();
}
let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};
writer.finish(manifest.clone()).unwrap();
let reader = LooseReader::new(path.as_path().into()).unwrap();
assert_eq!(reader.manifest(), &manifest);
for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
}
|
Ok(LooseWriter {
dir: path,
})
}
|
random_line_split
|
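The packed format above ends with the manifest offset encoded as eight little-endian bytes, which `PackedWriter::finish` and `PackedReader::new` build and parse with explicit shift chains. As a minimal sketch (hypothetical helper names, not part of this crate), the same round-trip on Rust 1.32+ collapses to `to_le_bytes`/`from_le_bytes`:

// Sketch of the trailing-offset encoding used by the packed snapshot format:
// the manifest offset is stored as 8 little-endian bytes at the end of the file.
fn encode_manifest_offset(off: u64) -> [u8; 8] {
    // Equivalent to the byte-by-byte shifts in `PackedWriter::finish`.
    off.to_le_bytes()
}

fn decode_manifest_offset(tail: &[u8; 8]) -> u64 {
    // Equivalent to the shift-and-add chain in `PackedReader::new`.
    u64::from_le_bytes(*tail)
}

fn main() {
    let off = 0x0123_4567_89ab_cdefu64;
    let bytes = encode_manifest_offset(off);
    assert_eq!(decode_manifest_offset(&bytes), off);
    println!("round-tripped manifest offset: {:#x}", off);
}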
glue.rs
|
ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty(t={})", t.repr(bcx.tcx()));
let _icx = push_ctxt("drop_ty");
if ty::type_needs_drop(bcx.tcx(), t) {
let ccx = bcx.ccx();
let glue = get_drop_glue(ccx, t);
let glue_type = get_drop_glue_type(ccx, t);
let ptr = if glue_type != t {
PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
} else
|
;
match source_location {
Some(sl) => debuginfo::set_source_location(bcx.fcx, sl.id, sl.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
Call(bcx, glue, &[ptr], None);
}
bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
Store(bcx, v, vp);
drop_ty(bcx, vp, t, source_location)
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
debug!("make drop glue for {}", ppaux::ty_to_string(ccx.tcx(), t));
let t = get_drop_glue_type(ccx, t);
debug!("drop glue type {}", ppaux::ty_to_string(ccx.tcx(), t));
match ccx.drop_glues().borrow().get(&t) {
Some(&glue) => return glue,
_ => { }
}
let llty = if ty::type_is_sized(ccx.tcx(), t) {
type_of(ccx, t).ptr_to()
} else {
type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
};
let llfnty = Type::glue_fn(ccx, llty);
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
let (sym, glue) = declare_generic_glue(ccx, t, llfnty, "drop");
(glue, Some(sym))
},
};
ccx.drop_glues().borrow_mut().insert(t, glue);
// To avoid infinite recursion, don't `make_drop_glue` until after we've
// added the entry to the `drop_glues` cache.
match new_sym {
Some(sym) => {
ccx.available_drop_glues().borrow_mut().insert(t, sym);
// We're creating a new drop glue, so also generate a body.
make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
},
None => {},
}
glue
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
let struct_data = if ty::type_is_sized(bcx.tcx(), t) {
v0
} else {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
Load(bcx, llval)
};
let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
with_cond(bcx, load_ty(bcx, drop_flag.val, ty::mk_bool()), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
})
}
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
// Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,
class_did, substs);
// The first argument is the "self" argument for drop
let params = unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
ty.element_type().func_params()
};
let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs);
let self_ty = match fty.sty {
ty::ty_bare_fn(ref f) => {
assert!(f.sig.inputs.len() == 1);
f.sig.inputs[0]
}
_ => bcx.sess().bug(format!("Expected function type, found {}",
bcx.ty_to_string(fty)).as_slice())
};
let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) {
(v0, None)
} else {
let data = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
(Load(bcx, data), Some(Load(bcx, info)))
};
adt::fold_variants(bcx, &*repr, struct_data, |variant_cx, st, value| {
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor panics.
let field_scope = variant_cx.fcx.push_custom_cleanup_scope();
// Class dtors have no explicit args, so the params should
// just consist of the environment (self).
assert_eq!(params.len(), 1);
let self_arg = if ty::type_is_fat_ptr(bcx.tcx(), self_ty) {
// The dtor expects a fat pointer, so make one, even if we have to fake it.
let boxed_ty = ty::mk_open(bcx.tcx(), t);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_self");
Store(bcx, value, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx,
// If we just had a thin pointer, make a fat pointer by sticking
// null where we put the unsizing info. This works because t
// is a sized type, so we will only unpack the fat pointer, never
// use the fake info.
info.unwrap_or(C_null(Type::i8p(bcx.ccx()))),
GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
PointerCast(variant_cx, scratch.val, params[0])
} else {
PointerCast(variant_cx, value, params[0])
};
let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of
// this scope. Iterate in reverse order so a Drop impl doesn't reverse
// the order in which fields get dropped.
for (i, ty) in st.fields.iter().enumerate().rev() {
let llfld_a = adt::struct_field_ptr(variant_cx, &*st, value, i, false);
let val = if ty::type_is_sized(bcx.tcx(), *ty) {
llfld_a
} else {
let boxed_ty = ty::mk_open(bcx.tcx(), *ty);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_field");
Store(bcx, llfld_a, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx, info.unwrap(), GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
scratch.val
};
variant_cx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
val, *ty);
}
let dtor_ty = ty::mk_ctor_fn(bcx.tcx(),
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
let (_, variant_cx) = invoke(variant_cx, dtor_addr, args, dtor_ty, None, false);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
})
}
fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {}",
bcx.ty_to_string(t), bcx.val_to_string(info));
if ty::type_is_sized(bcx.tcx(), t) {
let sizing_type = sizing_type_of(bcx.ccx(), t);
let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
return (size, align);
}
match t.sty {
ty::ty_struct(id, ref substs) => {
let ccx = bcx.ccx();
// First get the size of all statically known fields.
// Don't use type_of::sizing_type_of because that expects t to be sized.
assert!(!ty::type_is_simd(bcx.tcx(), t));
let repr = adt::represent_type(ccx, t);
let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));
// Recurse to get the size of the dynamically sized field (must be
// the last field).
let fields = ty::struct_fields(bcx.tcx(), id, substs);
let last_field = fields[fields.len()-1];
let field_ty = last_field.mt.ty;
let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
// Return the sum of sizes and max of aligns.
let size = Add(bcx, sized_size, unsized_size);
let align = Select(bcx,
ICmp(bcx, llvm::IntULT, sized_align, unsized_align),
sized_align,
unsized_align);
(size, align)
}
ty::ty_trait(..) => {
// info points to the vtable and the second entry in the vtable is the
// dynamic size of the object.
let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
let size_ptr = GEPi(bcx, info, &[1u]);
let align_ptr = GEPi(bcx, info, &[2u]);
(Load(bcx, size_ptr), Load(bcx, align_ptr))
}
ty::ty_vec(unit_ty, None) => {
// The info in this case is the length of the vec, so the size is that
// times the unit size.
let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
}
_ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t)).as_slice())
}
}
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
-> Block<'blk, 'tcx> {
// NB: v0 is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("make_drop_glue");
match t.sty {
ty::ty_uniq(content_ty) => {
match content_ty.sty {
ty::ty_vec(ty, None) => {
tvec::make_drop_glue_unboxed(bcx, v0, ty, true)
}
ty::ty_str => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), content_ty);
tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, true)
}
ty::ty_trait(..) => {
let lluniquevalue = GEPi(bcx, v0, &[0, abi::trt_field_box]);
// Only drop the value when it is non-null
let concrete_ptr = Load(bcx, lluniquevalue);
with_cond(bcx, IsNotNull(bcx, concrete_ptr), |bcx| {
let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::trt_field_vtable]));
let dtor = Load(bcx, dtor_ptr);
Call(bcx,
dtor,
&[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
None);
bcx
})
}
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), content_ty) => {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, v0, content_ty, None);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
let info = Load(bcx, info);
let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
trans_exchange_free_dyn(bcx, llbox, llsize, llalign)
})
}
_ => {
assert!(ty::type_is_sized(bcx.tcx(), content_ty));
let llval = v0;
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, llbox, content_ty, None);
trans_exchange_free_ty(bcx, llbox, content_ty)
})
}
}
}
ty::ty_struct(did, ref substs) | ty::ty_enum(did, ref substs) => {
let tcx = bcx.tcx();
match ty::ty_dtor(tcx, did) {
ty::TraitDtor(dtor, true) => {
// FIXME(16758) Since the struct is unsized, it is hard to
// find the drop flag (which is at the end of the struct).
// Let's just ignore the flag and pretend everything will be
// OK.
if ty::type_is_sized(bcx.tcx(), t) {
trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
} else {
// Give the user a heads up that we are doing something
// stupid and dangerous.
bcx.sess().warn(format!("Ignoring drop flag in destructor for {} \
because the struct is unsized. See issue \
#16758",
bcx.ty_to_string(t)).as_slice());
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
}
ty::TraitDtor(dtor, false) => {
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
ty::NoDtor => {
// No dtor
|
{
v
}
|
conditional_block
|
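`get_drop_glue` above deliberately inserts the declared glue into the `drop_glues` cache before generating its body, so that generating drop glue for a recursive type terminates. A self-contained sketch of that declare-before-generate memoization pattern (hypothetical types and names, not compiler code):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TypeId(u32);

#[derive(Clone, Copy, Debug)]
struct Glue(u32);

struct Ctx {
    glues: HashMap<TypeId, Glue>,
    next: u32,
}

impl Ctx {
    fn get_drop_glue(&mut self, t: TypeId, children: &[TypeId]) -> Glue {
        if let Some(&g) = self.glues.get(&t) {
            return g; // already declared (possibly mid-generation)
        }
        let g = Glue(self.next);
        self.next += 1;
        self.glues.insert(t, g); // declare and cache first...
        for &c in children {
            // ...then generate the body; a self-reference hits the cache
            // instead of recursing forever.
            self.get_drop_glue(c, &[]);
        }
        g
    }
}

fn main() {
    let mut cx = Ctx { glues: HashMap::new(), next: 0 };
    let t = TypeId(0);
    let g = cx.get_drop_glue(t, &[t]); // "recursive" type referencing itself
    println!("glue for {:?}: {:?}", t, g);
}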
glue.rs
|
<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ptr: ValueRef,
content_ty: Ty<'tcx>) -> Block<'blk, 'tcx> {
assert!(ty::type_is_sized(bcx.ccx().tcx(), content_ty));
let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
// `Box<ZeroSizeType>` does not allocate.
if content_size != 0 {
let content_align = align_of(bcx.ccx(), content_ty);
trans_exchange_free(bcx, ptr, content_size, content_align)
} else {
bcx
}
}
pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>) -> Ty<'tcx> {
let tcx = ccx.tcx();
// Even if there is no dtor for t, there might be one deeper down and we
// might need to pass in the vtable ptr.
if !ty::type_is_sized(tcx, t) {
return t
}
if !ty::type_needs_drop(tcx, t) {
return ty::mk_i8();
}
match t.sty {
ty::ty_uniq(typ) if !ty::type_needs_drop(tcx, typ)
&& ty::type_is_sized(tcx, typ) => {
let llty = sizing_type_of(ccx, typ);
// `Box<ZeroSizeType>` does not allocate.
if llsize_of_alloc(ccx, llty) == 0 {
ty::mk_i8()
} else {
t
}
}
_ => t
}
}
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty(t={})", t.repr(bcx.tcx()));
let _icx = push_ctxt("drop_ty");
if ty::type_needs_drop(bcx.tcx(), t) {
let ccx = bcx.ccx();
let glue = get_drop_glue(ccx, t);
let glue_type = get_drop_glue_type(ccx, t);
let ptr = if glue_type != t {
PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
} else {
v
};
match source_location {
Some(sl) => debuginfo::set_source_location(bcx.fcx, sl.id, sl.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
Call(bcx, glue, &[ptr], None);
}
bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
Store(bcx, v, vp);
drop_ty(bcx, vp, t, source_location)
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
debug!("make drop glue for {}", ppaux::ty_to_string(ccx.tcx(), t));
let t = get_drop_glue_type(ccx, t);
debug!("drop glue type {}", ppaux::ty_to_string(ccx.tcx(), t));
match ccx.drop_glues().borrow().get(&t) {
Some(&glue) => return glue,
_ => { }
}
let llty = if ty::type_is_sized(ccx.tcx(), t) {
type_of(ccx, t).ptr_to()
} else {
type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
};
let llfnty = Type::glue_fn(ccx, llty);
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
let (sym, glue) = declare_generic_glue(ccx, t, llfnty, "drop");
(glue, Some(sym))
},
};
ccx.drop_glues().borrow_mut().insert(t, glue);
// To avoid infinite recursion, don't `make_drop_glue` until after we've
// added the entry to the `drop_glues` cache.
match new_sym {
Some(sym) => {
ccx.available_drop_glues().borrow_mut().insert(t, sym);
// We're creating a new drop glue, so also generate a body.
make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
},
None => {},
}
glue
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
let struct_data = if ty::type_is_sized(bcx.tcx(), t) {
v0
} else {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
Load(bcx, llval)
};
let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
with_cond(bcx, load_ty(bcx, drop_flag.val, ty::mk_bool()), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
})
}
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
// Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,
class_did, substs);
// The first argument is the "self" argument for drop
let params = unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
ty.element_type().func_params()
};
let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs);
let self_ty = match fty.sty {
ty::ty_bare_fn(ref f) => {
assert!(f.sig.inputs.len() == 1);
f.sig.inputs[0]
}
_ => bcx.sess().bug(format!("Expected function type, found {}",
bcx.ty_to_string(fty)).as_slice())
};
let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) {
(v0, None)
} else {
let data = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
(Load(bcx, data), Some(Load(bcx, info)))
};
adt::fold_variants(bcx, &*repr, struct_data, |variant_cx, st, value| {
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor panics.
let field_scope = variant_cx.fcx.push_custom_cleanup_scope();
// Class dtors have no explicit args, so the params should
// just consist of the environment (self).
assert_eq!(params.len(), 1);
let self_arg = if ty::type_is_fat_ptr(bcx.tcx(), self_ty) {
// The dtor expects a fat pointer, so make one, even if we have to fake it.
let boxed_ty = ty::mk_open(bcx.tcx(), t);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_self");
Store(bcx, value, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx,
// If we just had a thin pointer, make a fat pointer by sticking
// null where we put the unsizing info. This works because t
// is a sized type, so we will only unpack the fat pointer, never
// use the fake info.
info.unwrap_or(C_null(Type::i8p(bcx.ccx()))),
GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
PointerCast(variant_cx, scratch.val, params[0])
} else {
PointerCast(variant_cx, value, params[0])
};
let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of
// this scope. Iterate in reverse order so a Drop impl doesn't reverse
// the order in which fields get dropped.
for (i, ty) in st.fields.iter().enumerate().rev() {
let llfld_a = adt::struct_field_ptr(variant_cx, &*st, value, i, false);
let val = if ty::type_is_sized(bcx.tcx(), *ty) {
llfld_a
} else {
let boxed_ty = ty::mk_open(bcx.tcx(), *ty);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_field");
Store(bcx, llfld_a, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx, info.unwrap(), GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
scratch.val
};
variant_cx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
val, *ty);
}
let dtor_ty = ty::mk_ctor_fn(bcx.tcx(),
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
let (_, variant_cx) = invoke(variant_cx, dtor_addr, args, dtor_ty, None, false);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
})
}
fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {}",
bcx.ty_to_string(t), bcx.val_to_string(info));
if ty::type_is_sized(bcx.tcx(), t) {
let sizing_type = sizing_type_of(bcx.ccx(), t);
let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
return (size, align);
}
match t.sty {
ty::ty_struct(id, ref substs) => {
let ccx = bcx.ccx();
// First get the size of all statically known fields.
// Don't use type_of::sizing_type_of because that expects t to be sized.
assert!(!ty::type_is_simd(bcx.tcx(), t));
let repr = adt::represent_type(ccx, t);
let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));
// Recurse to get the size of the dynamically sized field (must be
// the last field).
let fields = ty::struct_fields(bcx.tcx(), id, substs);
let last_field = fields[fields.len()-1];
let field_ty = last_field.mt.ty;
let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
// Return the sum of sizes and max of aligns.
let size = Add(bcx, sized_size, unsized_size);
let align = Select(bcx,
ICmp(bcx, llvm::IntULT, sized_align, unsized_align),
sized_align,
unsized_align);
(size, align)
}
ty::ty_trait(..) => {
// info points to the vtable and the second entry in the vtable is the
// dynamic size of the object.
let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
let size_ptr = GEPi(bcx, info, &[1u]);
let align_ptr = GEPi(bcx, info, &[2u]);
(Load(bcx, size_ptr), Load(bcx, align_ptr))
}
ty::ty_vec(unit_ty, None) => {
// The info in this case is the length of the vec, so the size is that
// times the unit size.
let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
}
_ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t)).as_slice())
}
}
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
-> Block<'blk, 'tcx> {
// NB: v0 is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("make_drop_glue");
match t.sty {
ty::ty_uniq(content_ty) => {
match content_ty.sty {
ty::ty_vec(ty, None) => {
tvec::make_drop_glue_unboxed(bcx, v0, ty, true)
}
ty::ty_str => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), content_ty);
tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, true)
}
ty::ty_trait(..) => {
let lluniquevalue = GEPi(bcx, v0, &[0, abi::trt_field_box]);
// Only drop the value when it is non-null
let concrete_ptr = Load(bcx, lluniquevalue);
with_cond(bcx, IsNotNull(bcx, concrete_ptr), |bcx| {
let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::trt_field_vtable]));
let dtor = Load(bcx, dtor_ptr);
Call(bcx,
dtor,
&[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
None);
bcx
})
}
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), content_ty) => {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, v0, content_ty, None);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
let info = Load(bcx, info);
let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
trans_exchange_free_dyn(bcx, llbox, llsize, llalign)
})
}
_ => {
assert!(ty::type_is_sized(bcx.tcx(), content_ty));
let llval = v0;
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, llbox, content_ty, None);
trans_exchange_free_ty(bcx, llbox, content_ty)
})
}
}
}
ty::ty_struct(did, ref substs) | ty::ty_enum(did, ref substs) => {
let tcx = bcx.tcx();
match ty::ty_dtor(tcx, did) {
ty::TraitDtor(dtor, true) => {
|
trans_exchange_free_ty
|
identifier_name
|
|
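`size_and_align_of_dst` above computes an unsized struct's size as the statically known prefix plus the dynamic tail, and its alignment as the larger of the two. A small numeric sketch of that arithmetic (hypothetical helper, outside the compiler):

// Mirrors the "sum of sizes and max of aligns" rule for a struct whose last
// field is unsized, as in `size_and_align_of_dst`.
fn dst_size_and_align(
    sized_size: u64,
    sized_align: u64,
    unsized_size: u64,
    unsized_align: u64,
) -> (u64, u64) {
    let size = sized_size + unsized_size;
    // Mirrors the Select(ICmp IntULT ...) sequence in the code above.
    let align = if sized_align < unsized_align { unsized_align } else { sized_align };
    (size, align)
}

fn main() {
    // e.g. a struct { tag: u32, data: [u16] } with a 5-element tail:
    let (size, align) = dst_size_and_align(4, 4, 5 * 2, 2);
    assert_eq!((size, align), (14, 4));
    println!("size = {}, align = {}", size, align);
}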
glue.rs
|
use middle::subst;
use middle::subst::{Subst, Substs};
use trans::adt;
use trans::base::*;
use trans::build::*;
use trans::callee;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::datum;
use trans::debuginfo;
use trans::expr;
use trans::machine::*;
use trans::tvec;
use trans::type_::Type;
use trans::type_of::{type_of, sizing_type_of, align_of};
use middle::ty::{mod, Ty};
use util::ppaux::{ty_to_short_str, Repr};
use util::ppaux;
use arena::TypedArena;
use std::c_str::ToCStr;
use libc::c_uint;
use syntax::ast;
use syntax::parse::token;
pub fn trans_exchange_free_dyn<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef,
size: ValueRef, align: ValueRef)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_exchange_free");
let ccx = cx.ccx();
callee::trans_lang_call(cx,
langcall(cx, None, "", ExchangeFreeFnLangItem),
&[PointerCast(cx, v, Type::i8p(ccx)), size, align],
Some(expr::Ignore)).bcx
}
pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef,
size: u64, align: u32) -> Block<'blk, 'tcx> {
trans_exchange_free_dyn(cx, v, C_uint(cx.ccx(), size),
C_uint(cx.ccx(), align))
}
pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ptr: ValueRef,
content_ty: Ty<'tcx>) -> Block<'blk, 'tcx> {
assert!(ty::type_is_sized(bcx.ccx().tcx(), content_ty));
let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
// `Box<ZeroSizeType>` does not allocate.
if content_size != 0 {
let content_align = align_of(bcx.ccx(), content_ty);
trans_exchange_free(bcx, ptr, content_size, content_align)
} else {
bcx
}
}
pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>) -> Ty<'tcx> {
let tcx = ccx.tcx();
// Even if there is no dtor for t, there might be one deeper down and we
// might need to pass in the vtable ptr.
if !ty::type_is_sized(tcx, t) {
return t
}
if !ty::type_needs_drop(tcx, t) {
return ty::mk_i8();
}
match t.sty {
ty::ty_uniq(typ) if !ty::type_needs_drop(tcx, typ)
&& ty::type_is_sized(tcx, typ) => {
let llty = sizing_type_of(ccx, typ);
// `Box<ZeroSizeType>` does not allocate.
if llsize_of_alloc(ccx, llty) == 0 {
ty::mk_i8()
} else {
t
}
}
_ => t
}
}
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty(t={})", t.repr(bcx.tcx()));
let _icx = push_ctxt("drop_ty");
if ty::type_needs_drop(bcx.tcx(), t) {
let ccx = bcx.ccx();
let glue = get_drop_glue(ccx, t);
let glue_type = get_drop_glue_type(ccx, t);
let ptr = if glue_type != t {
PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
} else {
v
};
match source_location {
Some(sl) => debuginfo::set_source_location(bcx.fcx, sl.id, sl.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
Call(bcx, glue, &[ptr], None);
}
bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
Store(bcx, v, vp);
drop_ty(bcx, vp, t, source_location)
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
debug!("make drop glue for {}", ppaux::ty_to_string(ccx.tcx(), t));
let t = get_drop_glue_type(ccx, t);
debug!("drop glue type {}", ppaux::ty_to_string(ccx.tcx(), t));
match ccx.drop_glues().borrow().get(&t) {
Some(&glue) => return glue,
_ => { }
}
let llty = if ty::type_is_sized(ccx.tcx(), t) {
type_of(ccx, t).ptr_to()
} else {
type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
};
let llfnty = Type::glue_fn(ccx, llty);
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
let (sym, glue) = declare_generic_glue(ccx, t, llfnty, "drop");
(glue, Some(sym))
},
};
ccx.drop_glues().borrow_mut().insert(t, glue);
// To avoid infinite recursion, don't `make_drop_glue` until after we've
// added the entry to the `drop_glues` cache.
match new_sym {
Some(sym) => {
ccx.available_drop_glues().borrow_mut().insert(t, sym);
// We're creating a new drop glue, so also generate a body.
make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
},
None => {},
}
glue
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
let struct_data = if ty::type_is_sized(bcx.tcx(), t) {
v0
} else {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
Load(bcx, llval)
};
let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
with_cond(bcx, load_ty(bcx, drop_flag.val, ty::mk_bool()), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
})
}
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
// Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,
class_did, substs);
// The first argument is the "self" argument for drop
let params = unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
ty.element_type().func_params()
};
let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs);
let self_ty = match fty.sty {
ty::ty_bare_fn(ref f) => {
assert!(f.sig.inputs.len() == 1);
f.sig.inputs[0]
}
_ => bcx.sess().bug(format!("Expected function type, found {}",
bcx.ty_to_string(fty)).as_slice())
};
let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) {
(v0, None)
} else {
let data = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
(Load(bcx, data), Some(Load(bcx, info)))
};
adt::fold_variants(bcx, &*repr, struct_data, |variant_cx, st, value| {
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor panics.
let field_scope = variant_cx.fcx.push_custom_cleanup_scope();
// Class dtors have no explicit args, so the params should
// just consist of the environment (self).
assert_eq!(params.len(), 1);
let self_arg = if ty::type_is_fat_ptr(bcx.tcx(), self_ty) {
// The dtor expects a fat pointer, so make one, even if we have to fake it.
let boxed_ty = ty::mk_open(bcx.tcx(), t);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_self");
Store(bcx, value, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx,
// If we just had a thin pointer, make a fat pointer by sticking
// null where we put the unsizing info. This works because t
// is a sized type, so we will only unpack the fat pointer, never
// use the fake info.
info.unwrap_or(C_null(Type::i8p(bcx.ccx()))),
GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
PointerCast(variant_cx, scratch.val, params[0])
} else {
PointerCast(variant_cx, value, params[0])
};
let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of
// this scope. Iterate in reverse order so a Drop impl doesn't reverse
// the order in which fields get dropped.
for (i, ty) in st.fields.iter().enumerate().rev() {
let llfld_a = adt::struct_field_ptr(variant_cx, &*st, value, i, false);
let val = if ty::type_is_sized(bcx.tcx(), *ty) {
llfld_a
} else {
let boxed_ty = ty::mk_open(bcx.tcx(), *ty);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_field");
Store(bcx, llfld_a, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx, info.unwrap(), GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
scratch.val
};
variant_cx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
val, *ty);
}
let dtor_ty = ty::mk_ctor_fn(bcx.tcx(),
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
let (_, variant_cx) = invoke(variant_cx, dtor_addr, args, dtor_ty, None, false);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
})
}
fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {}",
bcx.ty_to_string(t), bcx.val_to_string(info));
if ty::type_is_sized(bcx.tcx(), t) {
let sizing_type = sizing_type_of(bcx.ccx(), t);
let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
return (size, align);
}
match t.sty {
ty::ty_struct(id, ref substs) => {
let ccx = bcx.ccx();
// First get the size of all statically known fields.
// Don't use type_of::sizing_type_of because that expects t to be sized.
assert!(!ty::type_is_simd(bcx.tcx(), t));
let repr = adt::represent_type(ccx, t);
let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));
// Recurse to get the size of the dynamically sized field (must be
// the last field).
let fields = ty::struct_fields(bcx.tcx(), id, substs);
let last_field = fields[fields.len()-1];
let field_ty = last_field.mt.ty;
let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
// Return the sum of sizes and max of aligns.
let size = Add(bcx, sized_size, unsized_size);
let align = Select(bcx,
ICmp(bcx, llvm::IntULT, sized_align, unsized_align),
sized_align,
unsized_align);
(size, align)
}
ty::ty_trait(..) => {
// info points to the vtable and the second entry in the vtable is the
// dynamic size of the object.
let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
let size_ptr = GEPi(bcx, info, &[1u]);
let align_ptr = GEPi(bcx, info, &[2u]);
(Load(bcx, size_ptr), Load(bcx, align_ptr))
}
ty::ty_vec(unit_ty, None) => {
// The info in this case is the length of the vec, so the size is that
// times the unit size.
let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
}
_ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t)).as_slice())
}
}
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
-> Block<'blk, 'tcx> {
// NB: v0 is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("make_drop_glue");
match t.sty {
ty::ty_uniq(content_ty) => {
match content_ty.sty {
ty::ty_vec(ty, None) => {
tvec::make_drop_glue_unboxed(bcx, v0, ty, true)
}
ty::ty_str => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), content_ty);
tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, true)
}
ty::ty_trait(..) => {
let lluniquevalue = GEPi(bcx, v0, &[0, abi::trt_field_box]);
// Only drop the value when it is non-null
let concrete_ptr = Load(bcx, lluniquevalue);
with_cond(bcx, IsNotNull(bcx, concrete_ptr), |bcx| {
let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::trt_field_vtable]));
let dtor = Load(bcx, dtor_ptr);
Call(bcx,
dtor,
&[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
None);
bcx
})
}
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), content_ty) => {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, v0, content_ty, None);
|
use llvm::{ValueRef, True, get_param};
use llvm;
use middle::lang_items::ExchangeFreeFnLangItem;
|
random_line_split
|
|
glue.rs
|
ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty(t={})", t.repr(bcx.tcx()));
let _icx = push_ctxt("drop_ty");
if ty::type_needs_drop(bcx.tcx(), t) {
let ccx = bcx.ccx();
let glue = get_drop_glue(ccx, t);
let glue_type = get_drop_glue_type(ccx, t);
let ptr = if glue_type != t {
PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
} else {
v
};
match source_location {
Some(sl) => debuginfo::set_source_location(bcx.fcx, sl.id, sl.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
Call(bcx, glue, &[ptr], None);
}
bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
Store(bcx, v, vp);
drop_ty(bcx, vp, t, source_location)
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
debug!("make drop glue for {}", ppaux::ty_to_string(ccx.tcx(), t));
let t = get_drop_glue_type(ccx, t);
debug!("drop glue type {}", ppaux::ty_to_string(ccx.tcx(), t));
match ccx.drop_glues().borrow().get(&t) {
Some(&glue) => return glue,
_ => { }
}
let llty = if ty::type_is_sized(ccx.tcx(), t) {
type_of(ccx, t).ptr_to()
} else {
type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
};
let llfnty = Type::glue_fn(ccx, llty);
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
let (sym, glue) = declare_generic_glue(ccx, t, llfnty, "drop");
(glue, Some(sym))
},
};
ccx.drop_glues().borrow_mut().insert(t, glue);
// To avoid infinite recursion, don't `make_drop_glue` until after we've
// added the entry to the `drop_glues` cache.
match new_sym {
Some(sym) => {
ccx.available_drop_glues().borrow_mut().insert(t, sym);
// We're creating a new drop glue, so also generate a body.
make_generic_glue(ccx, t, glue, make_drop_glue, "drop");
},
None => {},
}
glue
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
let repr = adt::represent_type(bcx.ccx(), t);
let struct_data = if ty::type_is_sized(bcx.tcx(), t) {
v0
} else {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
Load(bcx, llval)
};
let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
with_cond(bcx, load_ty(bcx, drop_flag.val, ty::mk_bool()), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
})
}
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
v0: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx>
|
bcx.ty_to_string(fty)).as_slice())
};
let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) {
(v0, None)
} else {
let data = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
(Load(bcx, data), Some(Load(bcx, info)))
};
adt::fold_variants(bcx, &*repr, struct_data, |variant_cx, st, value| {
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor panics.
let field_scope = variant_cx.fcx.push_custom_cleanup_scope();
// Class dtors have no explicit args, so the params should
// just consist of the environment (self).
assert_eq!(params.len(), 1);
let self_arg = if ty::type_is_fat_ptr(bcx.tcx(), self_ty) {
// The dtor expects a fat pointer, so make one, even if we have to fake it.
let boxed_ty = ty::mk_open(bcx.tcx(), t);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_self");
Store(bcx, value, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx,
// If we just had a thin pointer, make a fat pointer by sticking
// null where we put the unsizing info. This works because t
// is a sized type, so we will only unpack the fat pointer, never
// use the fake info.
info.unwrap_or(C_null(Type::i8p(bcx.ccx()))),
GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
PointerCast(variant_cx, scratch.val, params[0])
} else {
PointerCast(variant_cx, value, params[0])
};
let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of
// this scope. Iterate in reverse order so a Drop impl doesn't reverse
// the order in which fields get dropped.
for (i, ty) in st.fields.iter().enumerate().rev() {
let llfld_a = adt::struct_field_ptr(variant_cx, &*st, value, i, false);
let val = if ty::type_is_sized(bcx.tcx(), *ty) {
llfld_a
} else {
let boxed_ty = ty::mk_open(bcx.tcx(), *ty);
let scratch = datum::rvalue_scratch_datum(bcx, boxed_ty, "__fat_ptr_drop_field");
Store(bcx, llfld_a, GEPi(bcx, scratch.val, &[0, abi::slice_elt_base]));
Store(bcx, info.unwrap(), GEPi(bcx, scratch.val, &[0, abi::slice_elt_len]));
scratch.val
};
variant_cx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
val, *ty);
}
let dtor_ty = ty::mk_ctor_fn(bcx.tcx(),
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
let (_, variant_cx) = invoke(variant_cx, dtor_addr, args, dtor_ty, None, false);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
})
}
fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {}",
bcx.ty_to_string(t), bcx.val_to_string(info));
if ty::type_is_sized(bcx.tcx(), t) {
let sizing_type = sizing_type_of(bcx.ccx(), t);
let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
return (size, align);
}
match t.sty {
ty::ty_struct(id, ref substs) => {
let ccx = bcx.ccx();
// First get the size of all statically known fields.
// Don't use type_of::sizing_type_of because that expects t to be sized.
assert!(!ty::type_is_simd(bcx.tcx(), t));
let repr = adt::represent_type(ccx, t);
let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));
// Recurse to get the size of the dynamically sized field (must be
// the last field).
let fields = ty::struct_fields(bcx.tcx(), id, substs);
let last_field = fields[fields.len()-1];
let field_ty = last_field.mt.ty;
let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
// Return the sum of sizes and max of aligns.
let size = Add(bcx, sized_size, unsized_size);
let align = Select(bcx,
ICmp(bcx, llvm::IntULT, sized_align, unsized_align),
sized_align,
unsized_align);
(size, align)
}
ty::ty_trait(..) => {
// info points to the vtable and the second entry in the vtable is the
// dynamic size of the object.
let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
let size_ptr = GEPi(bcx, info, &[1u]);
let align_ptr = GEPi(bcx, info, &[2u]);
(Load(bcx, size_ptr), Load(bcx, align_ptr))
}
ty::ty_vec(unit_ty, None) => {
// The info in this case is the length of the vec, so the size is that
// times the unit size.
let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
}
_ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t)).as_slice())
}
}
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
-> Block<'blk, 'tcx> {
// NB: v0 is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("make_drop_glue");
match t.sty {
ty::ty_uniq(content_ty) => {
match content_ty.sty {
ty::ty_vec(ty, None) => {
tvec::make_drop_glue_unboxed(bcx, v0, ty, true)
}
ty::ty_str => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), content_ty);
tvec::make_drop_glue_unboxed(bcx, v0, unit_ty, true)
}
ty::ty_trait(..) => {
let lluniquevalue = GEPi(bcx, v0, &[0, abi::trt_field_box]);
// Only drop the value when it is non-null
let concrete_ptr = Load(bcx, lluniquevalue);
with_cond(bcx, IsNotNull(bcx, concrete_ptr), |bcx| {
let dtor_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::trt_field_vtable]));
let dtor = Load(bcx, dtor_ptr);
Call(bcx,
dtor,
&[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
None);
bcx
})
}
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), content_ty) => {
let llval = GEPi(bcx, v0, &[0, abi::slice_elt_base]);
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, v0, content_ty, None);
let info = GEPi(bcx, v0, &[0, abi::slice_elt_len]);
let info = Load(bcx, info);
let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
trans_exchange_free_dyn(bcx, llbox, llsize, llalign)
})
}
_ => {
assert!(ty::type_is_sized(bcx.tcx(), content_ty));
let llval = v0;
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, llbox, content_ty, None);
trans_exchange_free_ty(bcx, llbox, content_ty)
})
}
}
}
ty::ty_struct(did, ref substs) | ty::ty_enum(did, ref substs) => {
let tcx = bcx.tcx();
match ty::ty_dtor(tcx, did) {
ty::TraitDtor(dtor, true) => {
// FIXME(16758) Since the struct is unsized, it is hard to
// find the drop flag (which is at the end of the struct).
// Let's just ignore the flag and pretend everything will be
// OK.
if ty::type_is_sized(bcx.tcx(), t) {
trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
} else {
// Give the user a heads up that we are doing something
// stupid and dangerous.
bcx.sess().warn(format!("Ignoring drop flag in destructor for {} \
because the struct is unsized. See issue \
#16758",
bcx.ty_to_string(t)).as_slice());
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
}
ty::TraitDtor(dtor, false) => {
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
ty::NoDtor => {
// No dtor?
|
{
let repr = adt::represent_type(bcx.ccx(), t);
// Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t,
class_did, substs);
// The first argument is the "self" argument for drop
let params = unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
ty.element_type().func_params()
};
let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs);
let self_ty = match fty.sty {
ty::ty_bare_fn(ref f) => {
assert!(f.sig.inputs.len() == 1);
f.sig.inputs[0]
}
_ => bcx.sess().bug(format!("Expected function type, found {}",
|
identifier_body
|
errors.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate http;
extern crate hyper;
// Add a simple union type of our various sources of HTTP-ish errors.
#[derive(Debug)]
pub enum HttpError {
Hyper(hyper::Error),
Generic(http::Error),
Status(hyper::StatusCode),
Uri,
Body,
UploadFailed,
}
impl From<hyper::Error> for HttpError {
fn from(err: hyper::Error) -> Self {
HttpError::Hyper(err)
}
}
impl From<http::uri::InvalidUri> for HttpError {
fn
|
(_: http::uri::InvalidUri) -> Self {
HttpError::Uri
}
}
impl From<http::Error> for HttpError {
fn from(err: http::Error) -> Self {
HttpError::Generic(err)
}
}
impl HttpError {
// Encode the GCS rules on retrying.
pub fn should_retry_gcs(&self) -> bool {
match self {
// If hyper says this is a user or parse error, it won't
// change on a retry.
Self::Hyper(e) => !e.is_parse() && !e.is_user(),
// https://cloud.google.com/storage/docs/json_api/v1/status-codes
// come in handy as a guide. In fact, it lists *408* as an
// additional thing you should retry on (not just the usual 429).
Self::Status(code) => match code.as_u16() {
// All 2xx are good (and aren't expected here, but are okay)
200..=299 => true,
// Any 3xx is "bad".
300..=399 => false,
// Both 429 *and* 408 are documented as needing retries.
408 | 429 => true,
// Other 4xx should not be retried.
400..=499 => false,
// Any 5xx is fair game for retry.
500..=599 => true,
// Anything else is a real surprise.
_ => false,
},
// TODO(boulos): Add more here. As we need them.
_ => {
debug!("Asked for should_retry_gcs({:#?}). Saying no", self);
false
}
}
}
}
|
from
|
identifier_name
|
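`should_retry_gcs` above only classifies an error as transient or permanent; the caller still needs a loop around it. A minimal sketch of how such a predicate is typically consumed, with a hypothetical generic `retry` helper and simulated status codes (not part of this module):

use std::{thread, time::Duration};

// Retry `op` with exponential backoff while `should_retry` says the error is
// transient; give up after `max_attempts` tries or on a permanent error.
fn retry<T, E>(
    mut op: impl FnMut() -> Result<T, E>,
    should_retry: impl Fn(&E) -> bool,
    max_attempts: u32,
) -> Result<T, E> {
    let mut delay = Duration::from_millis(100);
    for attempt in 1..=max_attempts {
        match op() {
            Ok(v) => return Ok(v),
            Err(e) if attempt < max_attempts && should_retry(&e) => {
                thread::sleep(delay);
                delay *= 2; // back off between transient failures
            }
            Err(e) => return Err(e),
        }
    }
    unreachable!("loop always returns when max_attempts >= 1")
}

fn main() {
    let mut calls = 0;
    // Simulate a request that returns 429 twice, then succeeds.
    let result = retry(
        || {
            calls += 1;
            if calls < 3 { Err(429u16) } else { Ok("done") }
        },
        // Stand-in for `should_retry_gcs`: 408 and 429 are retryable.
        |&status| status == 408 || status == 429,
        5,
    );
    assert_eq!(result, Ok("done"));
}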
errors.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate http;
extern crate hyper;
// Add a simple union type of our various sources of HTTP-ish errors.
#[derive(Debug)]
pub enum HttpError {
Hyper(hyper::Error),
Generic(http::Error),
Status(hyper::StatusCode),
Uri,
Body,
UploadFailed,
}
impl From<hyper::Error> for HttpError {
fn from(err: hyper::Error) -> Self {
HttpError::Hyper(err)
}
}
impl From<http::uri::InvalidUri> for HttpError {
fn from(_: http::uri::InvalidUri) -> Self {
HttpError::Uri
}
}
impl From<http::Error> for HttpError {
fn from(err: http::Error) -> Self {
HttpError::Generic(err)
}
}
impl HttpError {
// Encode the GCS rules on retrying.
pub fn should_retry_gcs(&self) -> bool
|
_ => false,
},
// TODO(boulos): Add more here. As we need them.
_ => {
debug!("Asked for should_retry_gcs({:#?}). Saying no", self);
false
}
}
}
}
|
{
match self {
// If hyper says this is a user or parse error, it won't
// change on a retry.
Self::Hyper(e) => !e.is_parse() && !e.is_user(),
// https://cloud.google.com/storage/docs/json_api/v1/status-codes
// come in handy as a guide. In fact, it lists *408* as an
// additional thing you should retry on (not just the usual 429).
Self::Status(code) => match code.as_u16() {
// All 2xx are good (and aren't expected here, but are okay)
200..=299 => true,
// Any 3xx is "bad".
300..=399 => false,
// Both 429 *and* 408 are documented as needing retries.
408 | 429 => true,
// Other 4xx should not be retried.
400..=499 => false,
// Any 5xx is fair game for retry.
500..=599 => true,
// Anything else is a real surprise.
|
identifier_body
|
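The `From` impls above are what make the `?` operator ergonomic here: `?` calls `From::from` on the error value, so each underlying error type converts into the union enum automatically. A self-contained sketch of the same pattern using standard-library error types (hypothetical `MyError`, standing in for `HttpError`):

#[derive(Debug)]
enum MyError {
    Parse(std::num::ParseIntError),
    Io(std::io::Error),
}

impl From<std::num::ParseIntError> for MyError {
    fn from(e: std::num::ParseIntError) -> Self {
        MyError::Parse(e)
    }
}

impl From<std::io::Error> for MyError {
    fn from(e: std::io::Error) -> Self {
        MyError::Io(e)
    }
}

fn parse_port(s: &str) -> Result<u16, MyError> {
    // `?` invokes `From<ParseIntError> for MyError` on failure.
    Ok(s.trim().parse::<u16>()?)
}

fn main() {
    assert_eq!(parse_port("8080").unwrap(), 8080);
    assert!(matches!(parse_port("nope"), Err(MyError::Parse(_))));
}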
errors.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate http;
extern crate hyper;
// Add a simple union type of our various sources of HTTP-ish errors.
#[derive(Debug)]
pub enum HttpError {
Hyper(hyper::Error),
Generic(http::Error),
Status(hyper::StatusCode),
Uri,
Body,
UploadFailed,
}
|
fn from(err: hyper::Error) -> Self {
HttpError::Hyper(err)
}
}
impl From<http::uri::InvalidUri> for HttpError {
fn from(_: http::uri::InvalidUri) -> Self {
HttpError::Uri
}
}
impl From<http::Error> for HttpError {
fn from(err: http::Error) -> Self {
HttpError::Generic(err)
}
}
impl HttpError {
// Encode the GCS rules on retrying.
pub fn should_retry_gcs(&self) -> bool {
match self {
// If hyper says this is a user or parse error, it won't
// change on a retry.
Self::Hyper(e) => !e.is_parse() && !e.is_user(),
// https://cloud.google.com/storage/docs/json_api/v1/status-codes
// come in handy as a guide. In fact, it lists *408* as an
// additional thing you should retry on (not just the usual 429).
Self::Status(code) => match code.as_u16() {
// All 2xx are good (and aren't expected here, but are okay)
200..=299 => true,
// Any 3xx is "bad".
300..=399 => false,
// Both 429 *and* 408 are documented as needing retries.
408 | 429 => true,
// Other 4xx should not be retried.
400..=499 => false,
// Any 5xx is fair game for retry.
500..=599 => true,
// Anything else is a real surprise.
_ => false,
},
            // TODO(boulos): Add more here as we need them.
_ => {
debug!("Asked for should_retry_gcs({:#?}). Saying no", self);
false
}
}
}
}
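// Illustrative sketch only: one way the classification above can be consumed.
// The client construction, attempt cap, and backoff here are assumptions for
// the example (as is a `tokio` runtime); they are not part of this module.
async fn _get_with_retry(uri: hyper::Uri) -> Result<hyper::Response<hyper::Body>, HttpError> {
    let client = hyper::Client::new();
    let mut last_err = HttpError::UploadFailed; // placeholder; overwritten below
    for attempt in 0..3u32 {
        match client.get(uri.clone()).await {
            Ok(resp) if resp.status().is_success() => return Ok(resp),
            Ok(resp) => last_err = HttpError::Status(resp.status()),
            Err(e) => last_err = HttpError::from(e),
        }
        if !last_err.should_retry_gcs() {
            break;
        }
        // Simple exponential backoff: 100ms, 200ms, 400ms.
        tokio::time::sleep(std::time::Duration::from_millis(100u64 << attempt)).await;
    }
    Err(last_err)
}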
|
impl From<hyper::Error> for HttpError {
|
random_line_split
|
rechunker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobstore::{Loadable, PutBehaviour};
use clap_old::Arg;
use cloned::cloned;
use context::CoreContext;
use fbinit::FacebookInit;
use futures::stream::{self, TryStreamExt};
use mercurial_types::{HgFileNodeId, HgNodeHash};
use std::str::FromStr;
use cmdlib::{args, helpers::block_execute};
const NAME: &str = "rechunker";
const DEFAULT_NUM_JOBS: usize = 10;
#[fbinit::main]
fn
|
(fb: FacebookInit) -> Result<(), Error> {
let matches = args::MononokeAppBuilder::new(NAME)
.with_advanced_args_hidden()
.with_special_put_behaviour(PutBehaviour::Overwrite)
.build()
.about("Rechunk blobs using the filestore")
.arg(
Arg::with_name("filenodes")
.value_name("FILENODES")
.takes_value(true)
.required(true)
.min_values(1)
.help("filenode IDs for blobs to be rechunked"),
)
.arg(
Arg::with_name("jobs")
.short("j")
.long("jobs")
.value_name("JOBS")
.takes_value(true)
.help("The number of filenodes to rechunk in parallel"),
)
.get_matches(fb)?;
let logger = matches.logger();
let ctx = CoreContext::new_with_logger(fb, logger.clone());
let jobs: usize = matches
.value_of("jobs")
.map_or(Ok(DEFAULT_NUM_JOBS), |j| j.parse())
.map_err(Error::from)?;
let filenode_ids: Vec<_> = matches
.values_of("filenodes")
.unwrap()
.into_iter()
.map(|f| {
HgNodeHash::from_str(f)
.map(HgFileNodeId::new)
.map_err(|e| format_err!("Invalid Sha1: {}", e))
})
.collect();
let blobrepo = args::open_repo(fb, logger, &matches);
let rechunk = async move {
let blobrepo: BlobRepo = blobrepo.await?;
stream::iter(filenode_ids)
.try_for_each_concurrent(jobs, |fid| {
cloned!(blobrepo, ctx);
async move {
let env = fid.load(&ctx, blobrepo.blobstore()).await?;
let content_id = env.content_id();
filestore::force_rechunk(
&blobrepo.get_blobstore(),
blobrepo.filestore_config().clone(),
&ctx,
content_id,
)
.await
.map(|_| ())
}
})
.await
};
block_execute(
rechunk,
fb,
"rechunker",
logger,
&matches,
cmdlib::monitoring::AliveService,
)
}
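// Illustrative sketch only: the bounded-fanout pattern main() relies on,
// reduced to a self-contained helper. `try_for_each_concurrent(limit, ...)`
// keeps at most `limit` futures in flight and stops at the first error.
async fn _bounded_fanout_demo() -> Result<(), Error> {
    let ids: Vec<Result<u32, Error>> = (0..8).map(Ok).collect();
    stream::iter(ids)
        .try_for_each_concurrent(2, |id| async move {
            // Stand-in for the per-filenode rechunk; at most 2 run at once.
            println!("processing filenode {}", id);
            Ok(())
        })
        .await
}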
|
main
|
identifier_name
|
rechunker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobstore::{Loadable, PutBehaviour};
use clap_old::Arg;
use cloned::cloned;
use context::CoreContext;
use fbinit::FacebookInit;
use futures::stream::{self, TryStreamExt};
use mercurial_types::{HgFileNodeId, HgNodeHash};
use std::str::FromStr;
use cmdlib::{args, helpers::block_execute};
const NAME: &str = "rechunker";
const DEFAULT_NUM_JOBS: usize = 10;
#[fbinit::main]
fn main(fb: FacebookInit) -> Result<(), Error>
|
.help("The number of filenodes to rechunk in parallel"),
)
.get_matches(fb)?;
let logger = matches.logger();
let ctx = CoreContext::new_with_logger(fb, logger.clone());
let jobs: usize = matches
.value_of("jobs")
.map_or(Ok(DEFAULT_NUM_JOBS), |j| j.parse())
.map_err(Error::from)?;
let filenode_ids: Vec<_> = matches
.values_of("filenodes")
.unwrap()
.into_iter()
.map(|f| {
HgNodeHash::from_str(f)
.map(HgFileNodeId::new)
.map_err(|e| format_err!("Invalid Sha1: {}", e))
})
.collect();
let blobrepo = args::open_repo(fb, logger, &matches);
let rechunk = async move {
let blobrepo: BlobRepo = blobrepo.await?;
stream::iter(filenode_ids)
.try_for_each_concurrent(jobs, |fid| {
cloned!(blobrepo, ctx);
async move {
let env = fid.load(&ctx, blobrepo.blobstore()).await?;
let content_id = env.content_id();
filestore::force_rechunk(
&blobrepo.get_blobstore(),
blobrepo.filestore_config().clone(),
&ctx,
content_id,
)
.await
.map(|_| ())
}
})
.await
};
block_execute(
rechunk,
fb,
"rechunker",
logger,
&matches,
cmdlib::monitoring::AliveService,
)
}
|
{
let matches = args::MononokeAppBuilder::new(NAME)
.with_advanced_args_hidden()
.with_special_put_behaviour(PutBehaviour::Overwrite)
.build()
.about("Rechunk blobs using the filestore")
.arg(
Arg::with_name("filenodes")
.value_name("FILENODES")
.takes_value(true)
.required(true)
.min_values(1)
.help("filenode IDs for blobs to be rechunked"),
)
.arg(
Arg::with_name("jobs")
.short("j")
.long("jobs")
.value_name("JOBS")
.takes_value(true)
|
identifier_body
|
rechunker.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
use anyhow::{format_err, Error};
use blobrepo::BlobRepo;
use blobstore::{Loadable, PutBehaviour};
use clap_old::Arg;
use cloned::cloned;
use context::CoreContext;
use fbinit::FacebookInit;
use futures::stream::{self, TryStreamExt};
use mercurial_types::{HgFileNodeId, HgNodeHash};
use std::str::FromStr;
use cmdlib::{args, helpers::block_execute};
const NAME: &str = "rechunker";
const DEFAULT_NUM_JOBS: usize = 10;
#[fbinit::main]
fn main(fb: FacebookInit) -> Result<(), Error> {
let matches = args::MononokeAppBuilder::new(NAME)
.with_advanced_args_hidden()
.with_special_put_behaviour(PutBehaviour::Overwrite)
.build()
|
.arg(
Arg::with_name("filenodes")
.value_name("FILENODES")
.takes_value(true)
.required(true)
.min_values(1)
.help("filenode IDs for blobs to be rechunked"),
)
.arg(
Arg::with_name("jobs")
.short("j")
.long("jobs")
.value_name("JOBS")
.takes_value(true)
.help("The number of filenodes to rechunk in parallel"),
)
.get_matches(fb)?;
let logger = matches.logger();
let ctx = CoreContext::new_with_logger(fb, logger.clone());
let jobs: usize = matches
.value_of("jobs")
.map_or(Ok(DEFAULT_NUM_JOBS), |j| j.parse())
.map_err(Error::from)?;
let filenode_ids: Vec<_> = matches
.values_of("filenodes")
.unwrap()
.into_iter()
.map(|f| {
HgNodeHash::from_str(f)
.map(HgFileNodeId::new)
.map_err(|e| format_err!("Invalid Sha1: {}", e))
})
.collect();
let blobrepo = args::open_repo(fb, logger, &matches);
let rechunk = async move {
let blobrepo: BlobRepo = blobrepo.await?;
stream::iter(filenode_ids)
.try_for_each_concurrent(jobs, |fid| {
cloned!(blobrepo, ctx);
async move {
let env = fid.load(&ctx, blobrepo.blobstore()).await?;
let content_id = env.content_id();
filestore::force_rechunk(
&blobrepo.get_blobstore(),
blobrepo.filestore_config().clone(),
&ctx,
content_id,
)
.await
.map(|_| ())
}
})
.await
};
block_execute(
rechunk,
fb,
"rechunker",
logger,
&matches,
cmdlib::monitoring::AliveService,
)
}
|
.about("Rechunk blobs using the filestore")
|
random_line_split
|
main.rs
|
use std::io;
use std::io::Write;
fn
|
(a: &str, b: &str) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Compare per-character counts: checking containment alone would
    // wrongly accept e.g. "aab" vs. "abb".
    for c in a.chars() {
        if a.matches(c).count() != b.matches(c).count() {
            return false;
        }
    }
    true
}
fn main() -> Result<(), std::io::Error> {
print!("Enter two strings and I'll tell you if then are anagrams: ");
io::stdout().flush()?;
let mut first = String::new();
io::stdin().read_line(&mut first)?;
let first = first.trim();
print!("Enter the second string: ");
io::stdout().flush()?;
let mut second = String::new();
io::stdin().read_line(&mut second)?;
let second = second.trim();
let ok = is_anagram(first, second);
let mut not = "";
    if !ok {
not = "not "
}
println!(r#"{} and {} are {}anagrams."#, first, second, not);
Ok(())
}
|
is_anagram
|
identifier_name
|
main.rs
|
use std::io;
use std::io::Write;
fn is_anagram(a: &str, b: &str) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Compare per-character counts: checking containment alone would
    // wrongly accept e.g. "aab" vs. "abb".
    for c in a.chars() {
        if a.matches(c).count() != b.matches(c).count() {
            return false;
        }
    }
    true
}
fn main() -> Result<(), std::io::Error> {
print!("Enter two strings and I'll tell you if then are anagrams: ");
io::stdout().flush()?;
let mut first = String::new();
io::stdin().read_line(&mut first)?;
let first = first.trim();
print!("Enter the second string: ");
io::stdout().flush()?;
let mut second = String::new();
io::stdin().read_line(&mut second)?;
let second = second.trim();
let ok = is_anagram(first, second);
let mut not = "";
    if !ok
|
println!(r#"{} and {} are {}anagrams."#, first, second, not);
Ok(())
}
|
{
not = "not "
}
|
conditional_block
|
main.rs
|
use std::io;
use std::io::Write;
fn is_anagram(a: &str, b: &str) -> bool
|
fn main() -> Result<(), std::io::Error> {
print!("Enter two strings and I'll tell you if then are anagrams: ");
io::stdout().flush()?;
let mut first = String::new();
io::stdin().read_line(&mut first)?;
let first = first.trim();
print!("Enter the second string: ");
io::stdout().flush()?;
let mut second = String::new();
io::stdin().read_line(&mut second)?;
let second = second.trim();
let ok = is_anagram(first, second);
let mut not = "";
    if !ok {
not = "not "
}
println!(r#"{} and {} are {}anagrams."#, first, second, not);
Ok(())
}
|
{
    if a.len() != b.len() {
        return false;
    }
    // Compare per-character counts: checking containment alone would
    // wrongly accept e.g. "aab" vs. "abb".
    for c in a.chars() {
        if a.matches(c).count() != b.matches(c).count() {
            return false;
        }
    }
    true
}
|
identifier_body
|
main.rs
|
use std::io;
use std::io::Write;
fn is_anagram(a: &str, b: &str) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Compare per-character counts: checking containment alone would
    // wrongly accept e.g. "aab" vs. "abb".
    for c in a.chars() {
        if a.matches(c).count() != b.matches(c).count() {
            return false;
        }
    }
    true
}
fn main() -> Result<(), std::io::Error> {
print!("Enter two strings and I'll tell you if then are anagrams: ");
io::stdout().flush()?;
let mut first = String::new();
io::stdin().read_line(&mut first)?;
let first = first.trim();
print!("Enter the second string: ");
io::stdout().flush()?;
let mut second = String::new();
io::stdin().read_line(&mut second)?;
let second = second.trim();
let ok = is_anagram(first, second);
let mut not = "";
|
Ok(())
}
|
if !ok {
not = "not "
}
println!(r#"{} and {} are {}anagrams."#, first, second, not);
|
random_line_split
|
mod.rs
|
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::geometry::Aabb;
use nalgebra::{Isometry3, Point3, RealField, Scalar, UnitQuaternion, Vector3};
use nav_types::{ECEF, WGS84};
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
#[macro_use]
pub mod base;
pub mod sat;
pub mod web_mercator;
pub use base::*;
pub use sat::*;
pub use web_mercator::*;
/// Lower bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MIN_M: f64 = 6_352_800.0;
/// Upper bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MAX_M: f64 = 6_384_400.0;
#[derive(Debug)]
pub struct ParseClosedIntervalError(String);
impl std::error::Error for ParseClosedIntervalError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
impl fmt::Display for ParseClosedIntervalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl From<std::num::ParseIntError> for ParseClosedIntervalError {
fn from(error: std::num::ParseIntError) -> Self {
Self(error.to_string())
}
}
impl From<std::num::ParseFloatError> for ParseClosedIntervalError {
fn from(error: std::num::ParseFloatError) -> Self {
Self(error.to_string())
}
}
/// An interval, intended to be read from a command line argument
/// and to be used in filtering the point cloud via an attribute.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct ClosedInterval<T> {
lower_bound: T,
upper_bound: T,
}
impl<T> ClosedInterval<T>
where
T: PartialOrd,
{
pub fn new(lower_bound: T, upper_bound: T) -> Self {
assert!(
lower_bound <= upper_bound,
"Lower bound needs to be smaller or equal to upper bound."
);
Self {
lower_bound,
upper_bound,
}
}
pub fn contains(self, value: T) -> bool {
self.lower_bound <= value && value <= self.upper_bound
}
}
impl<T> FromStr for ClosedInterval<T>
where
T: std::str::FromStr,
ParseClosedIntervalError: From<T::Err>,
{
type Err = ParseClosedIntervalError;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
let bounds: Vec<&str> = s.split(',').collect();
        if bounds.len() != 2
|
Ok(ClosedInterval {
lower_bound: bounds[0].parse()?,
upper_bound: bounds[1].parse()?,
})
}
}
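// Illustrative sketch only: how a filter string like "2,7" round-trips
// through the FromStr impl above (the bounds and type are arbitrary).
fn _closed_interval_demo() -> Result<(), ParseClosedIntervalError> {
    let interval: ClosedInterval<i64> = "2,7".parse()?;
    assert!(interval.contains(2) && interval.contains(7));
    assert!(!interval.contains(8));
    Ok(())
}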
/// Convenience trait to get a CellID from a Point3.
/// `From<Point3<S>>` cannot be used because of orphan rules.
pub trait FromPoint3<S: Scalar> {
fn from_point(p: &Point3<S>) -> Self;
}
impl<S> FromPoint3<S> for s2::cellid::CellID
where
S: Scalar,
f64: From<S>,
{
fn from_point(p: &Point3<S>) -> Self {
s2::cellid::CellID::from(s2::point::Point::from_coords(
f64::from(p.x.clone()),
f64::from(p.y.clone()),
f64::from(p.z.clone()),
))
}
}
impl<S: RealField> FromPoint3<S> for ECEF<S> {
fn from_point(p: &Point3<S>) -> Self {
ECEF::new(p.x, p.y, p.z)
}
}
/// Implementation of PointCulling which returns all points
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
pub struct AllPoints {}
impl IntersectAabb for AllPoints {
fn intersect_aabb(&self, _aabb: &Aabb) -> bool {
true
}
}
impl<'a> HasAabbIntersector<'a> for AllPoints {
type Intersector = Self;
fn aabb_intersector(&'a self) -> Self::Intersector {
*self
}
}
impl PointCulling for AllPoints {
fn contains(&self, _p: &Point3<f64>) -> bool {
true
}
}
// Returns the transform needed to go from ECEF to a local frame with the specified
// origin, where the axes are ENU (east, north, up). "Up" points along the normal to
// the oblate spheroid used as Earth's ellipsoid, which does not generally pass
// through the center of the Earth.
// https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#From_ECEF_to_ENU
pub fn local_frame_from_lat_lng(lat: f64, lon: f64) -> Isometry3<f64> {
let lat_lng_alt = WGS84::from_degrees_and_meters(lat, lon, 0.0);
let origin = ECEF::from(lat_lng_alt);
let origin_vector = Vector3::new(origin.x(), origin.y(), origin.z());
let rot_1 = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -std::f64::consts::FRAC_PI_2);
let rot_2 = UnitQuaternion::from_axis_angle(
&Vector3::y_axis(),
lat_lng_alt.latitude_radians() - std::f64::consts::FRAC_PI_2,
);
let rot_3 =
UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -lat_lng_alt.longitude_radians());
let rotation = rot_1 * rot_2 * rot_3;
Isometry3::from_parts(rotation.transform_vector(&-origin_vector).into(), rotation)
}
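// Illustrative sketch only: the ENU frame maps its own geodetic origin to the
// local origin. The latitude/longitude below are arbitrary example values.
fn _local_frame_demo() {
    let frame = local_frame_from_lat_lng(37.0, -122.0);
    let origin = ECEF::from(WGS84::from_degrees_and_meters(37.0, -122.0, 0.0));
    let local = frame.transform_point(&Point3::new(origin.x(), origin.y(), origin.z()));
    // Up to floating-point error, the origin lands at (0, 0, 0) in ENU.
    assert!(local.coords.norm() < 1e-6);
}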
#[cfg(test)]
mod tests {
use super::*;
use crate::geometry::{Aabb, Frustum, Perspective};
use nalgebra::{UnitQuaternion, Vector3};
#[test]
fn test_inverse() {
let persp = Perspective::new(-0.123, 0.45, 0.04, 0.75, 1.0, 4.0);
let reference_inverse = persp.as_matrix().try_inverse().unwrap();
let inverse = persp.inverse();
let diff = (reference_inverse - inverse).abs();
assert!(diff.max() < 1e-6, "diff.max() is {}", diff.max());
}
#[test]
fn test_frustum_intersects_aabb() {
let rot: Isometry3<f64> = nalgebra::convert(UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
std::f64::consts::PI,
));
let perspective = Perspective::new(
/* left */ -0.5, /* right */ 0.0, /* bottom */ -0.5, /* top */ 0.0,
/* near */ 1.0, /* far */ 4.0,
);
let frustum = Frustum::new(rot, perspective);
let bbox_min = Point3::new(-0.5, 0.25, 1.5);
let bbox_max = Point3::new(-0.25, 0.5, 3.5);
let bbox = Aabb::new(bbox_min, bbox_max);
assert_eq!(
frustum.intersector().intersect(&bbox.intersector()),
Relation::In
);
assert!(frustum.contains(&bbox_min));
assert!(frustum.contains(&bbox_max));
}
}
|
{
return Err(ParseClosedIntervalError(
"An interval needs to be defined by exactly 2 bounds.".into(),
));
}
|
conditional_block
|
mod.rs
|
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::geometry::Aabb;
use nalgebra::{Isometry3, Point3, RealField, Scalar, UnitQuaternion, Vector3};
use nav_types::{ECEF, WGS84};
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
|
pub use sat::*;
pub use web_mercator::*;
/// Lower bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MIN_M: f64 = 6_352_800.0;
/// Upper bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MAX_M: f64 = 6_384_400.0;
#[derive(Debug)]
pub struct ParseClosedIntervalError(String);
impl std::error::Error for ParseClosedIntervalError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
impl fmt::Display for ParseClosedIntervalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl From<std::num::ParseIntError> for ParseClosedIntervalError {
fn from(error: std::num::ParseIntError) -> Self {
Self(error.to_string())
}
}
impl From<std::num::ParseFloatError> for ParseClosedIntervalError {
fn from(error: std::num::ParseFloatError) -> Self {
Self(error.to_string())
}
}
/// An interval, intended to be read from a command line argument
/// and to be used in filtering the point cloud via an attribute.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct ClosedInterval<T> {
lower_bound: T,
upper_bound: T,
}
impl<T> ClosedInterval<T>
where
T: PartialOrd,
{
pub fn new(lower_bound: T, upper_bound: T) -> Self {
assert!(
lower_bound <= upper_bound,
"Lower bound needs to be smaller or equal to upper bound."
);
Self {
lower_bound,
upper_bound,
}
}
pub fn contains(self, value: T) -> bool {
self.lower_bound <= value && value <= self.upper_bound
}
}
impl<T> FromStr for ClosedInterval<T>
where
T: std::str::FromStr,
ParseClosedIntervalError: From<T::Err>,
{
type Err = ParseClosedIntervalError;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
let bounds: Vec<&str> = s.split(',').collect();
        if bounds.len() != 2 {
return Err(ParseClosedIntervalError(
"An interval needs to be defined by exactly 2 bounds.".into(),
));
}
Ok(ClosedInterval {
lower_bound: bounds[0].parse()?,
upper_bound: bounds[1].parse()?,
})
}
}
/// Convenience trait to get a CellID from a Point3.
/// `From<Point3<S>>` cannot be used because of orphan rules.
pub trait FromPoint3<S: Scalar> {
fn from_point(p: &Point3<S>) -> Self;
}
impl<S> FromPoint3<S> for s2::cellid::CellID
where
S: Scalar,
f64: From<S>,
{
fn from_point(p: &Point3<S>) -> Self {
s2::cellid::CellID::from(s2::point::Point::from_coords(
f64::from(p.x.clone()),
f64::from(p.y.clone()),
f64::from(p.z.clone()),
))
}
}
impl<S: RealField> FromPoint3<S> for ECEF<S> {
fn from_point(p: &Point3<S>) -> Self {
ECEF::new(p.x, p.y, p.z)
}
}
/// Implementation of PointCulling which returns all points
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
pub struct AllPoints {}
impl IntersectAabb for AllPoints {
fn intersect_aabb(&self, _aabb: &Aabb) -> bool {
true
}
}
impl<'a> HasAabbIntersector<'a> for AllPoints {
type Intersector = Self;
fn aabb_intersector(&'a self) -> Self::Intersector {
*self
}
}
impl PointCulling for AllPoints {
fn contains(&self, _p: &Point3<f64>) -> bool {
true
}
}
// Returns the transform needed to go from ECEF to a local frame with the specified
// origin, where the axes are ENU (east, north, up). "Up" points along the normal to
// the oblate spheroid used as Earth's ellipsoid, which does not generally pass
// through the center of the Earth.
// https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#From_ECEF_to_ENU
pub fn local_frame_from_lat_lng(lat: f64, lon: f64) -> Isometry3<f64> {
let lat_lng_alt = WGS84::from_degrees_and_meters(lat, lon, 0.0);
let origin = ECEF::from(lat_lng_alt);
let origin_vector = Vector3::new(origin.x(), origin.y(), origin.z());
let rot_1 = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -std::f64::consts::FRAC_PI_2);
let rot_2 = UnitQuaternion::from_axis_angle(
&Vector3::y_axis(),
lat_lng_alt.latitude_radians() - std::f64::consts::FRAC_PI_2,
);
let rot_3 =
UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -lat_lng_alt.longitude_radians());
let rotation = rot_1 * rot_2 * rot_3;
Isometry3::from_parts(rotation.transform_vector(&-origin_vector).into(), rotation)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::geometry::{Aabb, Frustum, Perspective};
use nalgebra::{UnitQuaternion, Vector3};
#[test]
fn test_inverse() {
let persp = Perspective::new(-0.123, 0.45, 0.04, 0.75, 1.0, 4.0);
let reference_inverse = persp.as_matrix().try_inverse().unwrap();
let inverse = persp.inverse();
let diff = (reference_inverse - inverse).abs();
assert!(diff.max() < 1e-6, "diff.max() is {}", diff.max());
}
#[test]
fn test_frustum_intersects_aabb() {
let rot: Isometry3<f64> = nalgebra::convert(UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
std::f64::consts::PI,
));
let perspective = Perspective::new(
/* left */ -0.5, /* right */ 0.0, /* bottom */ -0.5, /* top */ 0.0,
/* near */ 1.0, /* far */ 4.0,
);
let frustum = Frustum::new(rot, perspective);
let bbox_min = Point3::new(-0.5, 0.25, 1.5);
let bbox_max = Point3::new(-0.25, 0.5, 3.5);
let bbox = Aabb::new(bbox_min, bbox_max);
assert_eq!(
frustum.intersector().intersect(&bbox.intersector()),
Relation::In
);
assert!(frustum.contains(&bbox_min));
assert!(frustum.contains(&bbox_max));
}
}
|
#[macro_use]
pub mod base;
pub mod sat;
pub mod web_mercator;
pub use base::*;
|
random_line_split
|
mod.rs
|
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::geometry::Aabb;
use nalgebra::{Isometry3, Point3, RealField, Scalar, UnitQuaternion, Vector3};
use nav_types::{ECEF, WGS84};
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
#[macro_use]
pub mod base;
pub mod sat;
pub mod web_mercator;
pub use base::*;
pub use sat::*;
pub use web_mercator::*;
/// Lower bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MIN_M: f64 = 6_352_800.0;
/// Upper bound for distance from earth's center.
/// See https://en.wikipedia.org/wiki/Earth_radius#Geophysical_extremes
pub const EARTH_RADIUS_MAX_M: f64 = 6_384_400.0;
#[derive(Debug)]
pub struct ParseClosedIntervalError(String);
impl std::error::Error for ParseClosedIntervalError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
impl fmt::Display for ParseClosedIntervalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl From<std::num::ParseIntError> for ParseClosedIntervalError {
fn from(error: std::num::ParseIntError) -> Self {
Self(error.to_string())
}
}
impl From<std::num::ParseFloatError> for ParseClosedIntervalError {
fn from(error: std::num::ParseFloatError) -> Self {
Self(error.to_string())
}
}
/// An interval, intended to be read from a command line argument
/// and to be used in filtering the point cloud via an attribute.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct ClosedInterval<T> {
lower_bound: T,
upper_bound: T,
}
impl<T> ClosedInterval<T>
where
T: PartialOrd,
{
pub fn new(lower_bound: T, upper_bound: T) -> Self {
assert!(
lower_bound <= upper_bound,
"Lower bound needs to be smaller or equal to upper bound."
);
Self {
lower_bound,
upper_bound,
}
}
pub fn contains(self, value: T) -> bool {
self.lower_bound <= value && value <= self.upper_bound
}
}
impl<T> FromStr for ClosedInterval<T>
where
T: std::str::FromStr,
ParseClosedIntervalError: From<T::Err>,
{
type Err = ParseClosedIntervalError;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
let bounds: Vec<&str> = s.split(',').collect();
        if bounds.len() != 2 {
return Err(ParseClosedIntervalError(
"An interval needs to be defined by exactly 2 bounds.".into(),
));
}
Ok(ClosedInterval {
lower_bound: bounds[0].parse()?,
upper_bound: bounds[1].parse()?,
})
}
}
/// Convenience trait to get a CellID from a Point3.
/// `From<Point3<S>>` cannot be used because of orphan rules.
pub trait FromPoint3<S: Scalar> {
fn from_point(p: &Point3<S>) -> Self;
}
impl<S> FromPoint3<S> for s2::cellid::CellID
where
S: Scalar,
f64: From<S>,
{
fn
|
(p: &Point3<S>) -> Self {
s2::cellid::CellID::from(s2::point::Point::from_coords(
f64::from(p.x.clone()),
f64::from(p.y.clone()),
f64::from(p.z.clone()),
))
}
}
impl<S: RealField> FromPoint3<S> for ECEF<S> {
fn from_point(p: &Point3<S>) -> Self {
ECEF::new(p.x, p.y, p.z)
}
}
/// Implementation of PointCulling which returns all points
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
pub struct AllPoints {}
impl IntersectAabb for AllPoints {
fn intersect_aabb(&self, _aabb: &Aabb) -> bool {
true
}
}
impl<'a> HasAabbIntersector<'a> for AllPoints {
type Intersector = Self;
fn aabb_intersector(&'a self) -> Self::Intersector {
*self
}
}
impl PointCulling for AllPoints {
fn contains(&self, _p: &Point3<f64>) -> bool {
true
}
}
// Returns transform needed to go from ECEF to local frame with the specified origin where
// the axes are ENU (east, north, up <in the direction normal to the oblate spheroid
// used as Earth's ellipsoid, which does not generally pass through the center of the Earth>)
// https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#From_ECEF_to_ENU
pub fn local_frame_from_lat_lng(lat: f64, lon: f64) -> Isometry3<f64> {
let lat_lng_alt = WGS84::from_degrees_and_meters(lat, lon, 0.0);
let origin = ECEF::from(lat_lng_alt);
let origin_vector = Vector3::new(origin.x(), origin.y(), origin.z());
let rot_1 = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -std::f64::consts::FRAC_PI_2);
let rot_2 = UnitQuaternion::from_axis_angle(
&Vector3::y_axis(),
lat_lng_alt.latitude_radians() - std::f64::consts::FRAC_PI_2,
);
let rot_3 =
UnitQuaternion::from_axis_angle(&Vector3::z_axis(), -lat_lng_alt.longitude_radians());
let rotation = rot_1 * rot_2 * rot_3;
Isometry3::from_parts(rotation.transform_vector(&-origin_vector).into(), rotation)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::geometry::{Aabb, Frustum, Perspective};
use nalgebra::{UnitQuaternion, Vector3};
#[test]
fn test_inverse() {
let persp = Perspective::new(-0.123, 0.45, 0.04, 0.75, 1.0, 4.0);
let reference_inverse = persp.as_matrix().try_inverse().unwrap();
let inverse = persp.inverse();
let diff = (reference_inverse - inverse).abs();
assert!(diff.max() < 1e-6, "diff.max() is {}", diff.max());
}
#[test]
fn test_frustum_intersects_aabb() {
let rot: Isometry3<f64> = nalgebra::convert(UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
std::f64::consts::PI,
));
let perspective = Perspective::new(
/* left */ -0.5, /* right */ 0.0, /* bottom */ -0.5, /* top */ 0.0,
/* near */ 1.0, /* far */ 4.0,
);
let frustum = Frustum::new(rot, perspective);
let bbox_min = Point3::new(-0.5, 0.25, 1.5);
let bbox_max = Point3::new(-0.25, 0.5, 3.5);
let bbox = Aabb::new(bbox_min, bbox_max);
assert_eq!(
frustum.intersector().intersect(&bbox.intersector()),
Relation::In
);
assert!(frustum.contains(&bbox_min));
assert!(frustum.contains(&bbox_max));
}
}
|
from_point
|
identifier_name
|
salsa20.rs
|
use Error;
use super::ProtectedStream;
use util::{decrypt, sha256};
use crypto::salsa20::Salsa20 as SalsaDecryptor;
use rustc_serialize::base64::FromBase64;
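// Note: this fixed nonce appears to be the one KeePass 2.x uses for its
// Salsa20 protected-stream cipher; the actual cipher key is derived below
// as the SHA-256 of the stream key.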
const IV: [u8; 8] = [0xE8, 0x30, 0x09, 0x4B, 0x97, 0x20, 0x5D, 0x2A];
pub struct Salsa20 {
decryptor: SalsaDecryptor,
}
impl Salsa20 {
pub fn new(key: &[u8; 32]) -> Salsa20 {
Salsa20 { decryptor: SalsaDecryptor::new(&sha256(key), &IV) }
}
}
impl ProtectedStream for Salsa20 {
fn decrypt(&mut self, value: &str) -> Result<String, Error> {
let in_buffer = try!(value.from_base64().map_err(|e| Error::Base64(e)));
let result = try!(decrypt(&mut self.decryptor, &in_buffer));
Ok(try!(String::from_utf8(result).map_err(|e| Error::Utf8(e))))
}
}
#[cfg(test)]
mod tests {
use super::Salsa20;
use protected::ProtectedStream;
#[test]
fn
|
() {
let key = [0xE4, 0x70, 0xC4, 0xEF, 0x95, 0x61, 0x22, 0xDF, 0x2C, 0x0D, 0xD1, 0x42, 0x4A,
0x24, 0xE6, 0x87, 0x79, 0x29, 0xB9, 0xAD, 0x47, 0x9C, 0x0E, 0xA5, 0xA0, 0x5D,
0xB1, 0x27, 0x7A, 0xDF, 0xBD, 0xCD];
let mut salsa20 = Salsa20::new(&key);
let result = salsa20.decrypt("9crW5hp7SQ==").unwrap();
assert_eq!(result, "hunter2");
}
}
|
should_decrypt_password
|
identifier_name
|
salsa20.rs
|
use Error;
use super::ProtectedStream;
use util::{decrypt, sha256};
use crypto::salsa20::Salsa20 as SalsaDecryptor;
use rustc_serialize::base64::FromBase64;
const IV: [u8; 8] = [0xE8, 0x30, 0x09, 0x4B, 0x97, 0x20, 0x5D, 0x2A];
pub struct Salsa20 {
decryptor: SalsaDecryptor,
|
pub fn new(key: &[u8; 32]) -> Salsa20 {
Salsa20 { decryptor: SalsaDecryptor::new(&sha256(key), &IV) }
}
}
impl ProtectedStream for Salsa20 {
fn decrypt(&mut self, value: &str) -> Result<String, Error> {
let in_buffer = try!(value.from_base64().map_err(|e| Error::Base64(e)));
let result = try!(decrypt(&mut self.decryptor, &in_buffer));
Ok(try!(String::from_utf8(result).map_err(|e| Error::Utf8(e))))
}
}
#[cfg(test)]
mod tests {
use super::Salsa20;
use protected::ProtectedStream;
#[test]
fn should_decrypt_password() {
let key = [0xE4, 0x70, 0xC4, 0xEF, 0x95, 0x61, 0x22, 0xDF, 0x2C, 0x0D, 0xD1, 0x42, 0x4A,
0x24, 0xE6, 0x87, 0x79, 0x29, 0xB9, 0xAD, 0x47, 0x9C, 0x0E, 0xA5, 0xA0, 0x5D,
0xB1, 0x27, 0x7A, 0xDF, 0xBD, 0xCD];
let mut salsa20 = Salsa20::new(&key);
let result = salsa20.decrypt("9crW5hp7SQ==").unwrap();
assert_eq!(result, "hunter2");
}
}
|
}
impl Salsa20 {
|
random_line_split
|
main.rs
|
#[macro_use] extern crate error_chain;
#[macro_use] extern crate prettytable;
#[macro_use] extern crate stderr;
extern crate chan;
extern crate clap;
extern crate filetime;
extern crate globset;
extern crate pretty_bytes;
extern crate separator;
#[macro_use] mod errors;
mod compression;
mod lists;
mod structs;
use clap::{App, Arg};
use errors::*;
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
use lists::*;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::mpsc;
use structs::*;
const DEBUG_FILTERS: bool = cfg!(debug_assertions);
fn debug(message: &str) {
if DEBUG_FILTERS {
errstln!("{}", message);
}
}
quick_main!(run);
fn run() -> Result<()> {
let matches = App::new("static-compress")
.version("0.3.2")
.about("Create statically-compresed copies of matching files")
.author("NeoSmart Technologies")
.arg(Arg::new("compressor")
.short('c')
.long("compressor")
.value_name("[brotli|gzip|zopfli|webp]")
.help("The compressor to use (default: gzip)")
.takes_value(true))
.arg(Arg::new("threads")
.short('j')
.long("threads")
.value_name("COUNT")
.help("The number of simultaneous compressions (default: 1)")
.takes_value(true))
.arg(Arg::new("filters")
.value_name("FILTER")
.multiple_occurrences(true)
.required(true))
.arg(Arg::new("ext")
.short('e')
.value_name("EXT")
.long("extension")
.help("The extension to use for compressed files (default: gz, br, or webp)"))
.arg(Arg::new("quality")
.short('q')
.long("quality")
.takes_value(true)
.help("A quality parameter to be passed to the encoder. Algorithm-specific."))
/*.arg(Arg::new("excludes")
.short('x')
.value_name("FILTER")
.long("exclude")
.multiple(true)
.help("Exclude files matching this glob expression"))*/
.get_matches();
fn get_parameter<'a, T>(matches: &clap::ArgMatches, name: &'static str, default_value: T) -> Result<T>
where T: std::str::FromStr
{
match matches.value_of(name) {
Some(v) => {
Ok(v.parse().map_err(|_| ErrorKind::InvalidParameterValue(name))?)
}
None => Ok(default_value),
}
}
    let case_sensitive = !matches.is_present("nocase");
let compressor = get_parameter(&matches, "compressor", CompressionAlgorithm::GZip)?;
let temp = Parameters {
extension: matches.value_of("ext")
.unwrap_or(compressor.extension())
.trim_matches(|c: char| c.is_whitespace() || c.is_control() || c == '.')
.to_owned(),
compressor: compressor,
quality: match matches.value_of("quality") {
Some(q) => Some(q.parse::<u8>().map_err(|_| ErrorKind::InvalidParameterValue("quality"))?),
None => None
},
threads: get_parameter(&matches, "threads", 1)?,
};
/*let exclude_filters = match matches.values_of("exclude") {
Some(values)=> values.map(|s| s.to_owned()).collect(),
None => Vec::<String>::new(),
};*/
let parameters = Arc::<Parameters>::new(temp);
let (send_queue, stats_rx, wait_group) = start_workers(¶meters);
let mut include_filters: Vec<String> = match matches.values_of("filters") {
Some(values) => Ok(values.map(|s| s.to_owned()).collect()),
None => Err(ErrorKind::InvalidUsage),
}?;
let mut builder = GlobSetBuilder::new();
fix_filters(&mut include_filters);
for filter in include_filters.iter() {
let glob = GlobBuilder::new(filter)
.case_insensitive(!case_sensitive)
.literal_separator(true)
.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
builder.add(glob);
}
let globset = builder.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
//convert filters to paths and deal out conversion jobs
dispatch_jobs(send_queue, include_filters, globset/*, exclude_filters*/)?;
//wait for all jobs to finish
wait_group.wait();
//merge statistics from all threads
let mut stats = Statistics::new();
while let Ok(thread_stats) = stats_rx.recv() {
stats.merge(&thread_stats);
}
println!("{}", stats);
Ok(())
}
type ThreadParam = std::path::PathBuf;
fn start_workers<'a>(params: &Arc<Parameters>) -> (chan::Sender<ThreadParam>, mpsc::Receiver<Statistics>, chan::WaitGroup) {
let (tx, rx) = chan::sync::<ThreadParam>(params.threads);
let (stats_tx, stats_rx) = std::sync::mpsc::channel::<Statistics>();
let wg = chan::WaitGroup::new();
for _ in 0..params.threads {
let local_params = params.clone();
|
let local_stats_tx = stats_tx.clone();
let local_wg = wg.clone();
wg.add(1);
std::thread::spawn(move || {
worker_thread(local_params, local_stats_tx, local_rx);
local_wg.done();
});
}
(tx, stats_rx, wg)
}
fn yield_file<F>(path: PathBuf, globset: &GlobSet, callback: &F) -> Result<()>
where F: Fn(PathBuf) -> Result<()>
{
if is_hidden(&path)? {
        //we are ignoring .files and .directories
//we may add a command-line switch to control this behavior in the future
return Ok(());
}
if path.is_dir() {
for child in path.read_dir()? {
let child_path = child?.path();
yield_file(child_path, globset, callback)?;
}
}
else {
//I'm presuming the binary search in is_blacklisted is faster
//than globset.is_match, but we should benchmark it at some point
        if !is_blacklisted(&path)? && globset.is_match(&path) {
callback(path)?;
}
}
Ok(())
}
fn dispatch_jobs(send_queue: chan::Sender<ThreadParam>, filters: Vec<String>, globset: GlobSet/*, exclude_filters: Vec<String>*/) -> Result<()> {
let paths = extract_paths(&filters)?;
for path in paths {
yield_file(path, &globset, &|path: PathBuf| {
send_queue.send(path);
Ok(())
})?
}
Ok(())
}
fn worker_thread(params: Arc<Parameters>, stats_tx: mpsc::Sender<Statistics>, rx: chan::Receiver<ThreadParam>) {
let mut local_stats = Statistics::new();
loop {
let src = match rx.recv() {
Some(task) => task,
None => break, //no more tasks
};
//in a nested function so we can handle errors centrally
fn compress_single(src: &ThreadParam, params: &Parameters, mut local_stats: &mut Statistics) -> Result<()> {
let dst_path = format!("{}.{}",
src.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?,
params.extension);
let dst = Path::new(&dst_path);
//again, in a scope for error handling
|local_stats: &mut Statistics| -> Result<()> {
let src_metadata = std::fs::metadata(src)?;
                //skip files whose compressed copies are already up to date
if let Ok(dst_metadata) = std::fs::metadata(dst) {
//the destination already exists
let src_seconds = src_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
let dst_seconds = dst_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
match src_seconds == dst_seconds {
true => {
local_stats.update(src_metadata.len(), dst_metadata.len(), false);
return Ok(());//no need to recompress
},
false => {
std::fs::remove_file(dst)?; //throw if we can't
}
};
}
println!("{}", src.to_string_lossy());
params.compressor.compress(src.as_path(), dst, params.quality)?;
let dst_metadata = std::fs::metadata(dst)?;
local_stats.update(src_metadata.len(), dst_metadata.len(), true);
let src_modified = filetime::FileTime::from_last_modification_time(&src_metadata);
filetime::set_file_times(dst, filetime::FileTime::zero(), src_modified).unwrap_or_default();
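                // Design note (not in the original comments): pinning the
                // destination's mtime to the source's is what lets the
                // timestamp-equality check above treat "same mtime" as
                // "already compressed and unchanged" on the next run.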
Ok(())
}(&mut local_stats)
.map_err(|e| {
//try deleting the invalid destination file, but don't care if we can't
std::fs::remove_file(dst).unwrap_or_default();
e //return the same error
})
}
if let Err(e) = compress_single(&src, ¶ms, &mut local_stats) {
errstln!("Error compressing {}: {}", src.to_string_lossy(), e);
}
}
    if stats_tx.send(local_stats).is_err() {
errstln!("Error compiling statistics!");
}
}
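// Note: binary_search_by requires `sorted` to already be in ascending order;
// COMP_EXTS (from the lists module) is assumed to satisfy that.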
fn str_search(sorted: &[&str], search_term: &str, case_sensitive: bool) -> std::result::Result<usize, usize> {
let term = match case_sensitive {
true => search_term.to_owned(),
false => search_term.to_lowercase(),
};
sorted.binary_search_by(|probe| probe.cmp(&&*term))
}
fn is_hidden(path: &Path) -> Result<bool> {
let hidden = match path.file_name() {
Some(x) => x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?
.starts_with("."),
None => false
};
Ok(hidden)
}
fn is_blacklisted(path: &Path) -> Result<bool> {
let r = match path.extension() {
Some(x) => {
let ext = x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?;
str_search(COMP_EXTS, &ext, false).is_ok()
},
None => false,
};
return Ok(r);
}
//prepends ./ to relative paths
fn fix_filters(filters: &mut Vec<String>) {
for i in 0..filters.len() {
let new_path;
{
let ref path = filters[i];
match path.chars().next().expect("Received blank filter!") {
'.' | '/' => continue,
_ => new_path = format!("./{}", path) //un-prefixed path
}
}
filters[i] = new_path;
}
}
//Given a list of filters, extracts the directories that should be searched
//To-Do: Also provide info about to what depth they should be recursed
use std::collections::HashSet;
fn extract_paths(filters: &Vec<String>) -> Result<HashSet<PathBuf>> {
use std::iter::FromIterator;
let mut dirs = std::collections::HashSet::<PathBuf>::new();
{
let insert_path = &mut |filter: &String, dir: PathBuf| {
debug(&format!("filter {} mapped to search {}", filter, dir.display()));
dirs.insert(dir);
};
for filter in filters {
//take everything until the first expression
let mut last_char = None::<char>;
let dir;
{
let partial = filter.chars().take_while(|c| match c {
&'?' | &'*' | &'{' | &'[' => false,
c => { last_char = Some(c.clone()); true }
});
dir = String::from_iter(partial);
}
let dir = match dir.chars().next() {
Some(c) => match c {
'.' | '/' => PathBuf::from(dir),
_ => {
let mut pb = PathBuf::from("./");
pb.push(dir);
pb
}
},
None => {
insert_path(filter, PathBuf::from("./"));
continue;
}
};
if dir.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?.ends_with(filter) {
//the "dir" is actually a full path to a single file
//return it as-is
insert_path(filter, dir);
continue;
}
if last_char == Some('/') {
                //dir is already a directory, return it as-is
insert_path(filter, dir);
continue;
}
//we need to extract the directory from the path we have
let dir = match PathBuf::from(dir).parent() {
Some(parent) => parent.to_path_buf(),
None => PathBuf::from("./"),
};
insert_path(filter, dir);
}
}
debug(&format!("final search paths: {:?}", dirs));
Ok(dirs)
}
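// Illustrative mappings of the rules above (not exercised anywhere in code):
//   "static/**/*.css"  -> search root "./static/"
//   "*.html"           -> search root "./"
//   "./docs/readme.md" -> kept as-is (full path to a single file)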
|
let local_rx = rx.clone();
|
random_line_split
|
main.rs
|
#[macro_use] extern crate error_chain;
#[macro_use] extern crate prettytable;
#[macro_use] extern crate stderr;
extern crate chan;
extern crate clap;
extern crate filetime;
extern crate globset;
extern crate pretty_bytes;
extern crate separator;
#[macro_use] mod errors;
mod compression;
mod lists;
mod structs;
use clap::{App, Arg};
use errors::*;
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
use lists::*;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::mpsc;
use structs::*;
const DEBUG_FILTERS: bool = cfg!(debug_assertions);
fn debug(message: &str) {
if DEBUG_FILTERS {
errstln!("{}", message);
}
}
quick_main!(run);
fn run() -> Result<()> {
let matches = App::new("static-compress")
.version("0.3.2")
.about("Create statically-compresed copies of matching files")
.author("NeoSmart Technologies")
.arg(Arg::new("compressor")
.short('c')
.long("compressor")
.value_name("[brotli|gzip|zopfli|webp]")
.help("The compressor to use (default: gzip)")
.takes_value(true))
.arg(Arg::new("threads")
.short('j')
.long("threads")
.value_name("COUNT")
.help("The number of simultaneous compressions (default: 1)")
.takes_value(true))
.arg(Arg::new("filters")
.value_name("FILTER")
.multiple_occurrences(true)
.required(true))
.arg(Arg::new("ext")
.short('e')
.value_name("EXT")
.long("extension")
.help("The extension to use for compressed files (default: gz, br, or webp)"))
.arg(Arg::new("quality")
.short('q')
.long("quality")
.takes_value(true)
.help("A quality parameter to be passed to the encoder. Algorithm-specific."))
/*.arg(Arg::new("excludes")
.short('x')
.value_name("FILTER")
.long("exclude")
.multiple(true)
.help("Exclude files matching this glob expression"))*/
.get_matches();
fn get_parameter<'a, T>(matches: &clap::ArgMatches, name: &'static str, default_value: T) -> Result<T>
where T: std::str::FromStr
{
match matches.value_of(name) {
Some(v) => {
Ok(v.parse().map_err(|_| ErrorKind::InvalidParameterValue(name))?)
}
None => Ok(default_value),
}
}
    let case_sensitive = !matches.is_present("nocase");
let compressor = get_parameter(&matches, "compressor", CompressionAlgorithm::GZip)?;
let temp = Parameters {
extension: matches.value_of("ext")
.unwrap_or(compressor.extension())
.trim_matches(|c: char| c.is_whitespace() || c.is_control() || c == '.')
.to_owned(),
compressor: compressor,
quality: match matches.value_of("quality") {
Some(q) => Some(q.parse::<u8>().map_err(|_| ErrorKind::InvalidParameterValue("quality"))?),
None => None
},
threads: get_parameter(&matches, "threads", 1)?,
};
/*let exclude_filters = match matches.values_of("exclude") {
Some(values)=> values.map(|s| s.to_owned()).collect(),
None => Vec::<String>::new(),
};*/
let parameters = Arc::<Parameters>::new(temp);
let (send_queue, stats_rx, wait_group) = start_workers(¶meters);
let mut include_filters: Vec<String> = match matches.values_of("filters") {
Some(values) => Ok(values.map(|s| s.to_owned()).collect()),
None => Err(ErrorKind::InvalidUsage),
}?;
let mut builder = GlobSetBuilder::new();
fix_filters(&mut include_filters);
for filter in include_filters.iter() {
let glob = GlobBuilder::new(filter)
.case_insensitive(!case_sensitive)
.literal_separator(true)
.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
builder.add(glob);
}
let globset = builder.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
//convert filters to paths and deal out conversion jobs
dispatch_jobs(send_queue, include_filters, globset/*, exclude_filters*/)?;
//wait for all jobs to finish
wait_group.wait();
//merge statistics from all threads
let mut stats = Statistics::new();
while let Ok(thread_stats) = stats_rx.recv() {
stats.merge(&thread_stats);
}
println!("{}", stats);
Ok(())
}
type ThreadParam = std::path::PathBuf;
fn start_workers<'a>(params: &Arc<Parameters>) -> (chan::Sender<ThreadParam>, mpsc::Receiver<Statistics>, chan::WaitGroup) {
let (tx, rx) = chan::sync::<ThreadParam>(params.threads);
let (stats_tx, stats_rx) = std::sync::mpsc::channel::<Statistics>();
let wg = chan::WaitGroup::new();
for _ in 0..params.threads {
let local_params = params.clone();
let local_rx = rx.clone();
let local_stats_tx = stats_tx.clone();
let local_wg = wg.clone();
wg.add(1);
std::thread::spawn(move || {
worker_thread(local_params, local_stats_tx, local_rx);
local_wg.done();
});
}
(tx, stats_rx, wg)
}
fn yield_file<F>(path: PathBuf, globset: &GlobSet, callback: &F) -> Result<()>
where F: Fn(PathBuf) -> Result<()>
{
if is_hidden(&path)? {
        //we are ignoring .files and .directories
//we may add a command-line switch to control this behavior in the future
return Ok(());
}
if path.is_dir() {
for child in path.read_dir()? {
let child_path = child?.path();
yield_file(child_path, globset, callback)?;
}
}
else {
//I'm presuming the binary search in is_blacklisted is faster
//than globset.is_match, but we should benchmark it at some point
        if !is_blacklisted(&path)? && globset.is_match(&path) {
callback(path)?;
}
}
Ok(())
}
fn dispatch_jobs(send_queue: chan::Sender<ThreadParam>, filters: Vec<String>, globset: GlobSet/*, exclude_filters: Vec<String>*/) -> Result<()> {
let paths = extract_paths(&filters)?;
for path in paths {
yield_file(path, &globset, &|path: PathBuf| {
send_queue.send(path);
Ok(())
})?
}
Ok(())
}
fn worker_thread(params: Arc<Parameters>, stats_tx: mpsc::Sender<Statistics>, rx: chan::Receiver<ThreadParam>) {
let mut local_stats = Statistics::new();
loop {
let src = match rx.recv() {
Some(task) => task,
None => break, //no more tasks
};
//in a nested function so we can handle errors centrally
fn compress_single(src: &ThreadParam, params: &Parameters, mut local_stats: &mut Statistics) -> Result<()> {
let dst_path = format!("{}.{}",
src.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?,
params.extension);
let dst = Path::new(&dst_path);
//again, in a scope for error handling
|local_stats: &mut Statistics| -> Result<()> {
let src_metadata = std::fs::metadata(src)?;
                //skip files whose compressed copies are already up to date
if let Ok(dst_metadata) = std::fs::metadata(dst) {
//the destination already exists
let src_seconds = src_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
let dst_seconds = dst_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
match src_seconds == dst_seconds {
true => {
local_stats.update(src_metadata.len(), dst_metadata.len(), false);
return Ok(());//no need to recompress
},
false => {
std::fs::remove_file(dst)?; //throw if we can't
}
};
}
println!("{}", src.to_string_lossy());
params.compressor.compress(src.as_path(), dst, params.quality)?;
let dst_metadata = std::fs::metadata(dst)?;
local_stats.update(src_metadata.len(), dst_metadata.len(), true);
let src_modified = filetime::FileTime::from_last_modification_time(&src_metadata);
filetime::set_file_times(dst, filetime::FileTime::zero(), src_modified).unwrap_or_default();
Ok(())
}(&mut local_stats)
.map_err(|e| {
//try deleting the invalid destination file, but don't care if we can't
std::fs::remove_file(dst).unwrap_or_default();
e //return the same error
})
}
if let Err(e) = compress_single(&src, ¶ms, &mut local_stats) {
errstln!("Error compressing {}: {}", src.to_string_lossy(), e);
}
}
    if stats_tx.send(local_stats).is_err() {
errstln!("Error compiling statistics!");
}
}
fn str_search(sorted: &[&str], search_term: &str, case_sensitive: bool) -> std::result::Result<usize, usize> {
let term = match case_sensitive {
true => search_term.to_owned(),
false => search_term.to_lowercase(),
};
sorted.binary_search_by(|probe| probe.cmp(&&*term))
}
fn is_hidden(path: &Path) -> Result<bool> {
let hidden = match path.file_name() {
Some(x) => x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?
.starts_with("."),
None => false
};
Ok(hidden)
}
fn is_blacklisted(path: &Path) -> Result<bool>
|
//prepends ./ to relative paths
fn fix_filters(filters: &mut Vec<String>) {
for i in 0..filters.len() {
let new_path;
{
let ref path = filters[i];
match path.chars().next().expect("Received blank filter!") {
'.' | '/' => continue,
_ => new_path = format!("./{}", path) //un-prefixed path
}
}
filters[i] = new_path;
}
}
//Given a list of filters, extracts the directories that should be searched
//To-Do: Also provide info about to what depth they should be recursed
use std::collections::HashSet;
fn extract_paths(filters: &Vec<String>) -> Result<HashSet<PathBuf>> {
use std::iter::FromIterator;
let mut dirs = std::collections::HashSet::<PathBuf>::new();
{
let insert_path = &mut |filter: &String, dir: PathBuf| {
debug(&format!("filter {} mapped to search {}", filter, dir.display()));
dirs.insert(dir);
};
for filter in filters {
//take everything until the first expression
let mut last_char = None::<char>;
let dir;
{
let partial = filter.chars().take_while(|c| match c {
&'?' | &'*' | &'{' | &'[' => false,
c => { last_char = Some(c.clone()); true }
});
dir = String::from_iter(partial);
}
let dir = match dir.chars().next() {
Some(c) => match c {
'.' | '/' => PathBuf::from(dir),
_ => {
let mut pb = PathBuf::from("./");
pb.push(dir);
pb
}
},
None => {
insert_path(filter, PathBuf::from("./"));
continue;
}
};
if dir.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?.ends_with(filter) {
//the "dir" is actually a full path to a single file
//return it as-is
insert_path(filter, dir);
continue;
}
if last_char == Some('/') {
                //dir is already a directory, return it as-is
insert_path(filter, dir);
continue;
}
//we need to extract the directory from the path we have
let dir = match PathBuf::from(dir).parent() {
Some(parent) => parent.to_path_buf(),
None => PathBuf::from("./"),
};
insert_path(filter, dir);
}
}
debug(&format!("final search paths: {:?}", dirs));
Ok(dirs)
}
|
{
let r = match path.extension() {
Some(x) => {
let ext = x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?;
str_search(COMP_EXTS, &ext, false).is_ok()
},
None => false,
};
return Ok(r);
}
|
identifier_body
|
main.rs
|
#[macro_use] extern crate error_chain;
#[macro_use] extern crate prettytable;
#[macro_use] extern crate stderr;
extern crate chan;
extern crate clap;
extern crate filetime;
extern crate globset;
extern crate pretty_bytes;
extern crate separator;
#[macro_use] mod errors;
mod compression;
mod lists;
mod structs;
use clap::{App, Arg};
use errors::*;
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
use lists::*;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::mpsc;
use structs::*;
const DEBUG_FILTERS: bool = cfg!(debug_assertions);
fn debug(message: &str) {
if DEBUG_FILTERS {
errstln!("{}", message);
}
}
quick_main!(run);
fn run() -> Result<()> {
let matches = App::new("static-compress")
.version("0.3.2")
.about("Create statically-compresed copies of matching files")
.author("NeoSmart Technologies")
.arg(Arg::new("compressor")
.short('c')
.long("compressor")
.value_name("[brotli|gzip|zopfli|webp]")
.help("The compressor to use (default: gzip)")
.takes_value(true))
.arg(Arg::new("threads")
.short('j')
.long("threads")
.value_name("COUNT")
.help("The number of simultaneous compressions (default: 1)")
.takes_value(true))
.arg(Arg::new("filters")
.value_name("FILTER")
.multiple_occurrences(true)
.required(true))
.arg(Arg::new("ext")
.short('e')
.value_name("EXT")
.long("extension")
.help("The extension to use for compressed files (default: gz, br, or webp)"))
.arg(Arg::new("quality")
.short('q')
.long("quality")
.takes_value(true)
.help("A quality parameter to be passed to the encoder. Algorithm-specific."))
/*.arg(Arg::new("excludes")
.short('x')
.value_name("FILTER")
.long("exclude")
.multiple(true)
.help("Exclude files matching this glob expression"))*/
.get_matches();
fn get_parameter<'a, T>(matches: &clap::ArgMatches, name: &'static str, default_value: T) -> Result<T>
where T: std::str::FromStr
{
match matches.value_of(name) {
Some(v) => {
Ok(v.parse().map_err(|_| ErrorKind::InvalidParameterValue(name))?)
}
None => Ok(default_value),
}
}
    let case_sensitive = !matches.is_present("nocase");
let compressor = get_parameter(&matches, "compressor", CompressionAlgorithm::GZip)?;
let temp = Parameters {
extension: matches.value_of("ext")
.unwrap_or(compressor.extension())
.trim_matches(|c: char| c.is_whitespace() || c.is_control() || c == '.')
.to_owned(),
compressor: compressor,
quality: match matches.value_of("quality") {
Some(q) => Some(q.parse::<u8>().map_err(|_| ErrorKind::InvalidParameterValue("quality"))?),
None => None
},
threads: get_parameter(&matches, "threads", 1)?,
};
/*let exclude_filters = match matches.values_of("exclude") {
Some(values)=> values.map(|s| s.to_owned()).collect(),
None => Vec::<String>::new(),
};*/
let parameters = Arc::<Parameters>::new(temp);
let (send_queue, stats_rx, wait_group) = start_workers(¶meters);
let mut include_filters: Vec<String> = match matches.values_of("filters") {
Some(values) => Ok(values.map(|s| s.to_owned()).collect()),
None => Err(ErrorKind::InvalidUsage),
}?;
let mut builder = GlobSetBuilder::new();
fix_filters(&mut include_filters);
for filter in include_filters.iter() {
let glob = GlobBuilder::new(filter)
.case_insensitive(!case_sensitive)
.literal_separator(true)
.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
builder.add(glob);
}
let globset = builder.build().map_err(|_| ErrorKind::InvalidIncludeFilter)?;
//convert filters to paths and deal out conversion jobs
dispatch_jobs(send_queue, include_filters, globset/*, exclude_filters*/)?;
//wait for all jobs to finish
wait_group.wait();
//merge statistics from all threads
let mut stats = Statistics::new();
while let Ok(thread_stats) = stats_rx.recv() {
stats.merge(&thread_stats);
}
println!("{}", stats);
Ok(())
}
type ThreadParam = std::path::PathBuf;
fn start_workers<'a>(params: &Arc<Parameters>) -> (chan::Sender<ThreadParam>, mpsc::Receiver<Statistics>, chan::WaitGroup) {
let (tx, rx) = chan::sync::<ThreadParam>(params.threads);
let (stats_tx, stats_rx) = std::sync::mpsc::channel::<Statistics>();
let wg = chan::WaitGroup::new();
for _ in 0..params.threads {
let local_params = params.clone();
let local_rx = rx.clone();
let local_stats_tx = stats_tx.clone();
let local_wg = wg.clone();
wg.add(1);
std::thread::spawn(move || {
worker_thread(local_params, local_stats_tx, local_rx);
local_wg.done();
});
}
(tx, stats_rx, wg)
}
fn
|
<F>(path: PathBuf, globset: &GlobSet, callback: &F) -> Result<()>
where F: Fn(PathBuf) -> Result<()>
{
if is_hidden(&path)? {
        //we are ignoring .files and .directories
//we may add a command-line switch to control this behavior in the future
return Ok(());
}
if path.is_dir() {
for child in path.read_dir()? {
let child_path = child?.path();
yield_file(child_path, globset, callback)?;
}
}
else {
//I'm presuming the binary search in is_blacklisted is faster
//than globset.is_match, but we should benchmark it at some point
        if !is_blacklisted(&path)? && globset.is_match(&path) {
callback(path)?;
}
}
Ok(())
}
fn dispatch_jobs(send_queue: chan::Sender<ThreadParam>, filters: Vec<String>, globset: GlobSet/*, exclude_filters: Vec<String>*/) -> Result<()> {
let paths = extract_paths(&filters)?;
for path in paths {
yield_file(path, &globset, &|path: PathBuf| {
send_queue.send(path);
Ok(())
})?
}
Ok(())
}
fn worker_thread(params: Arc<Parameters>, stats_tx: mpsc::Sender<Statistics>, rx: chan::Receiver<ThreadParam>) {
let mut local_stats = Statistics::new();
loop {
let src = match rx.recv() {
Some(task) => task,
None => break, //no more tasks
};
//in a nested function so we can handle errors centrally
fn compress_single(src: &ThreadParam, params: &Parameters, mut local_stats: &mut Statistics) -> Result<()> {
let dst_path = format!("{}.{}",
src.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?,
params.extension);
let dst = Path::new(&dst_path);
//again, in a scope for error handling
|local_stats: &mut Statistics| -> Result<()> {
let src_metadata = std::fs::metadata(src)?;
                //skip files whose compressed copies are already up to date
if let Ok(dst_metadata) = std::fs::metadata(dst) {
//the destination already exists
let src_seconds = src_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
let dst_seconds = dst_metadata.modified()?.duration_since(std::time::UNIX_EPOCH)?.as_secs();
match src_seconds == dst_seconds {
true => {
local_stats.update(src_metadata.len(), dst_metadata.len(), false);
return Ok(());//no need to recompress
},
false => {
std::fs::remove_file(dst)?; //throw if we can't
}
};
}
println!("{}", src.to_string_lossy());
params.compressor.compress(src.as_path(), dst, params.quality)?;
let dst_metadata = std::fs::metadata(dst)?;
local_stats.update(src_metadata.len(), dst_metadata.len(), true);
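//copy src's mtime onto dst so the timestamp comparison above can skip
//unchanged files on the next run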
let src_modified = filetime::FileTime::from_last_modification_time(&src_metadata);
filetime::set_file_times(dst, filetime::FileTime::zero(), src_modified).unwrap_or_default();
Ok(())
}(&mut local_stats)
.map_err(|e| {
//try deleting the invalid destination file, but don't care if we can't
std::fs::remove_file(dst).unwrap_or_default();
e //return the same error
})
}
if let Err(e) = compress_single(&src, ¶ms, &mut local_stats) {
errstln!("Error compressing {}: {}", src.to_string_lossy(), e);
}
}
if stats_tx.send(local_stats).is_err() {
errstln!("Error compiling statistics!");
}
}
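//case-folding binary search over a sorted string table, mirroring
//slice::binary_search: Ok(index) on a hit, Err(insertion_point) on a miss;
//e.g. with a hypothetical table, str_search(&["gz", "zip"], "GZ", false)
//returns Ok(0)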
fn str_search(sorted: &[&str], search_term: &str, case_sensitive: bool) -> std::result::Result<usize, usize> {
let term = match case_sensitive {
true => search_term.to_owned(),
false => search_term.to_lowercase(),
};
sorted.binary_search_by(|probe| probe.cmp(&&*term))
}
fn is_hidden(path: &Path) -> Result<bool> {
let hidden = match path.file_name() {
Some(x) => x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?
.starts_with("."),
None => false
};
Ok(hidden)
}
fn is_blacklisted(path: &Path) -> Result<bool> {
let r = match path.extension() {
Some(x) => {
let ext = x.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?;
str_search(COMP_EXTS, &ext, false).is_ok()
},
None => false,
};
return Ok(r);
}
//prepends ./ to relative paths
fn fix_filters(filters: &mut Vec<String>) {
for i in 0..filters.len() {
let new_path;
{
let ref path = filters[i];
match path.chars().next().expect("Received blank filter!") {
'.' | '/' => continue,
_ => new_path = format!("./{}", path) //un-prefixed path
}
}
filters[i] = new_path;
}
}
//Given a list of filters, extracts the directories that should be searched
//To-Do: Also provide info about to what depth they should be recursed
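//e.g. the filter "src/*.rs" (made "./src/*.rs" by fix_filters) yields the
//search root "./src/", while a filter that names a single file with no glob
//characters is returned as-is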
use std::collections::HashSet;
fn extract_paths(filters: &Vec<String>) -> Result<HashSet<PathBuf>> {
use std::iter::FromIterator;
let mut dirs = std::collections::HashSet::<PathBuf>::new();
{
let insert_path = &mut |filter: &String, dir: PathBuf| {
debug(&format!("filter {} mapped to search {}", filter, dir.display()));
dirs.insert(dir);
};
for filter in filters {
//take everything until the first expression
let mut last_char = None::<char>;
let dir;
{
let partial = filter.chars().take_while(|c| match c {
&'?' | &'*' | &'{' | &'[' => false,
c => { last_char = Some(c.clone()); true }
});
dir = String::from_iter(partial);
}
let dir = match dir.chars().next() {
Some(c) => match c {
'.' | '/' => PathBuf::from(dir),
_ => {
let mut pb = PathBuf::from("./");
pb.push(dir);
pb
}
},
None => {
insert_path(filter, PathBuf::from("./"));
continue;
}
};
if dir.to_str().ok_or(ErrorKind::InvalidCharactersInPath)?.ends_with(filter) {
//the "dir" is actually a full path to a single file
//return it as-is
insert_path(filter, dir);
continue;
}
if last_char == Some('/') {
//dir is a already a directory, return it as-is
insert_path(filter, dir);
continue;
}
//we need to extract the directory from the path we have
let dir = match PathBuf::from(dir).parent() {
Some(parent) => parent.to_path_buf(),
None => PathBuf::from("./"),
};
insert_path(filter, dir);
}
}
debug(&format!("final search paths: {:?}", dirs));
Ok(dirs)
}
|
yield_file
|
identifier_name
|
hashdb.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Database of byte-slices keyed to their Keccak hash.
use hash::*;
use std::collections::HashMap;
use elastic_array::ElasticArray128;
/// `HashDB` value type.
pub type DBValue = ElasticArray128<u8>;
/// Trait modelling datastore keyed by a 32-byte Keccak hash.
pub trait HashDB: AsHashDB + Send + Sync {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, i32>;
/// Look up a given hash, returning the bytes that hash to it, or None if
/// the hash is not known.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// let hash = m.insert(hello_bytes);
/// assert_eq!(m.get(&hash).unwrap(), hello_bytes);
/// }
/// ```
fn get(&self, key: &H256) -> Option<DBValue>;
|
/// Check for the existence of a hash-key.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// assert!(!m.contains(&hello_bytes.sha3()));
/// let key = m.insert(hello_bytes);
/// assert!(m.contains(&key));
/// m.remove(&key);
/// assert!(!m.contains(&key));
/// }
/// ```
fn contains(&self, key: &H256) -> bool;
/// Insert a datum into the DB and return the datum's hash for a later lookup. Insertions
/// are counted and the equivalent number of `remove()`s must be performed before the data
/// is considered dead.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::hash::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let key = m.insert("Hello world!".as_bytes());
/// assert!(m.contains(&key));
/// }
/// ```
fn insert(&mut self, value: &[u8]) -> H256;
/// Like `insert()`, except you provide the key and the data is all moved.
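///
/// A minimal sketch (hypothetical; assumes a `MemoryDB` named `m` and that
/// the key is the Keccak hash of the same bytes):
/// ```rust,ignore
/// let value = DBValue::from_slice(b"Hello world!");
/// m.emplace(value.sha3(), value);
/// ```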
fn emplace(&mut self, key: H256, value: DBValue);
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
/// happen without the data eventually being inserted into the DB.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let d = "Hello world!".as_bytes();
/// let key = &d.sha3();
/// m.remove(key); // OK - we now owe an insertion.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now it's "empty" again.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now we've inserted it for real, so it can be looked up.
/// assert_eq!(m.get(key).unwrap(), d);
/// }
/// ```
fn remove(&mut self, key: &H256);
}
/// Upcast trait.
pub trait AsHashDB {
/// Perform upcast to HashDB for anything that derives from HashDB.
fn as_hashdb(&self) -> &HashDB;
/// Perform mutable upcast to HashDB for anything that derives from HashDB.
fn as_hashdb_mut(&mut self) -> &mut HashDB;
}
impl<T: HashDB> AsHashDB for T {
fn as_hashdb(&self) -> &HashDB {
self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
}
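// Note: thanks to the blanket impl above, any concrete `HashDB` such as
// `MemoryDB` coerces to the trait object, so a hypothetical helper like
// `fn count_keys(db: &HashDB) -> usize { db.keys().len() }` accepts either
// `m.as_hashdb()` or `&m` directly.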
|
random_line_split
|
|
hashdb.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Database of byte-slices keyed to their Keccak hash.
use hash::*;
use std::collections::HashMap;
use elastic_array::ElasticArray128;
/// `HashDB` value type.
pub type DBValue = ElasticArray128<u8>;
/// Trait modelling datastore keyed by a 32-byte Keccak hash.
pub trait HashDB: AsHashDB + Send + Sync {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, i32>;
/// Look up a given hash, returning the bytes that hash to it, or None if
/// the hash is not known.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// let hash = m.insert(hello_bytes);
/// assert_eq!(m.get(&hash).unwrap(), hello_bytes);
/// }
/// ```
fn get(&self, key: &H256) -> Option<DBValue>;
/// Check for the existence of a hash-key.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// assert!(!m.contains(&hello_bytes.sha3()));
/// let key = m.insert(hello_bytes);
/// assert!(m.contains(&key));
/// m.remove(&key);
/// assert!(!m.contains(&key));
/// }
/// ```
fn contains(&self, key: &H256) -> bool;
/// Insert a datum into the DB and return the datum's hash for a later lookup. Insertions
/// are counted and the equivalent number of `remove()`s must be performed before the data
/// is considered dead.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::hash::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let key = m.insert("Hello world!".as_bytes());
/// assert!(m.contains(&key));
/// }
/// ```
fn insert(&mut self, value: &[u8]) -> H256;
/// Like `insert()`, except you provide the key and the data is all moved.
fn emplace(&mut self, key: H256, value: DBValue);
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
/// happen without the data eventually being inserted into the DB.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let d = "Hello world!".as_bytes();
/// let key = &d.sha3();
/// m.remove(key); // OK - we now owe an insertion.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now it's "empty" again.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now we've inserted it for real, so it can be looked up.
/// assert_eq!(m.get(key).unwrap(), d);
/// }
/// ```
fn remove(&mut self, key: &H256);
}
/// Upcast trait.
pub trait AsHashDB {
/// Perform upcast to HashDB for anything that derives from HashDB.
fn as_hashdb(&self) -> &HashDB;
/// Perform mutable upcast to HashDB for anything that derives from HashDB.
fn as_hashdb_mut(&mut self) -> &mut HashDB;
}
impl<T: HashDB> AsHashDB for T {
fn as_hashdb(&self) -> &HashDB
|
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
}
|
{
self
}
|
identifier_body
|
hashdb.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Database of byte-slices keyed to their Keccak hash.
use hash::*;
use std::collections::HashMap;
use elastic_array::ElasticArray128;
/// `HashDB` value type.
pub type DBValue = ElasticArray128<u8>;
/// Trait modelling datastore keyed by a 32-byte Keccak hash.
pub trait HashDB: AsHashDB + Send + Sync {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, i32>;
/// Look up a given hash, returning the bytes that hash to it, or None if
/// the hash is not known.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// let hash = m.insert(hello_bytes);
/// assert_eq!(m.get(&hash).unwrap(), hello_bytes);
/// }
/// ```
fn get(&self, key: &H256) -> Option<DBValue>;
/// Check for the existence of a hash-key.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let hello_bytes = "Hello world!".as_bytes();
/// assert!(!m.contains(&hello_bytes.sha3()));
/// let key = m.insert(hello_bytes);
/// assert!(m.contains(&key));
/// m.remove(&key);
/// assert!(!m.contains(&key));
/// }
/// ```
fn contains(&self, key: &H256) -> bool;
/// Insert a datum into the DB and return the datum's hash for a later lookup. Insertions
/// are counted and the equivalent number of `remove()`s must be performed before the data
/// is considered dead.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::hash::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let key = m.insert("Hello world!".as_bytes());
/// assert!(m.contains(&key));
/// }
/// ```
fn insert(&mut self, value: &[u8]) -> H256;
/// Like `insert()`, except you provide the key and the data is all moved.
fn emplace(&mut self, key: H256, value: DBValue);
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
/// happen without the data eventually being inserted into the DB.
///
/// # Examples
/// ```rust
/// extern crate ethcore_util;
/// use ethcore_util::hashdb::*;
/// use ethcore_util::memorydb::*;
/// use ethcore_util::sha3::*;
/// fn main() {
/// let mut m = MemoryDB::new();
/// let d = "Hello world!".as_bytes();
/// let key = &d.sha3();
/// m.remove(key); // OK - we now owe an insertion.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now it's "empty" again.
/// assert!(!m.contains(key));
/// m.insert(d); // OK - now we've inserted it for real, so it can be looked up.
/// assert_eq!(m.get(key).unwrap(), d);
/// }
/// ```
fn remove(&mut self, key: &H256);
}
/// Upcast trait.
pub trait AsHashDB {
/// Perform upcast to HashDB for anything that derives from HashDB.
fn as_hashdb(&self) -> &HashDB;
/// Perform mutable upcast to HashDB for anything that derives from HashDB.
fn as_hashdb_mut(&mut self) -> &mut HashDB;
}
impl<T: HashDB> AsHashDB for T {
fn
|
(&self) -> &HashDB {
self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
}
|
as_hashdb
|
identifier_name
|
lub.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::then;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::region_inference::RegionMark;
use std::collections::HashMap;
use syntax::ast::{Many, Once, NodeId};
use syntax::ast::{NormalFn, UnsafeFn};
use syntax::ast::{Onceness, FnStyle};
use syntax::ast::{MutMutable, MutImmutable};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype
impl<'f> Lub<'f> {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields<'f> { let Lub(ref v) = *self; v }
}
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }
fn sub<'a>(&'a self) -> Sub<'a> { Sub(self.get_ref().clone()) }
fn lub<'a>(&'a self) -> Lub<'a> { Lub(self.get_ref().clone()) }
fn glb<'a>(&'a self) -> Glb<'a> { Glb(self.get_ref().clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability)
}
let m = a.mutbl;
match m {
MutImmutable => {
self.tys(a.ty, b.ty).and_then(|t| Ok(ty::mt {ty: t, mutbl: m}) )
}
MutMutable => {
self.get_ref().infcx.try(|| {
eq_tys(self, a.ty, b.ty).then(|| {
Ok(ty::mt {ty: a.ty, mutbl: m})
})
}).or_else(|e| Err(e))
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.glb().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(UnsafeFn, _) | (_, UnsafeFn) => Ok(UnsafeFn),
(NormalFn, NormalFn) => Ok(NormalFn),
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Once, _) | (_, Once) => Ok(Once),
(Many, Many) => Ok(Many)
}
}
fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the LUB (mutual supertype) is the intersection.
Ok(a.intersection(b))
}
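// e.g. lub({Send, Sync}, {Send}) = {Send}: the common supertype can only
// promise bounds that both sides satisfy.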
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.glb().regions(a, b)
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.get_ref().infcx.tcx),
b.repr(self.get_ref().infcx.tcx));
Ok(self.get_ref().infcx.region_vars.lub_regions(Subtype(self.trace()), a, b))
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.get_ref().infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let (b_with_fresh, _) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.get_ref().infcx.tcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
self.get_ref().infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, mark, new_vars.as_slice(),
sig0.binder_id, &a_map, r));
return Ok(sig1);
fn generalize_region(this: &Lub,
mark: RegionMark,
new_vars: &[RegionVid],
new_scope: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
r0: ty::Region)
-> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
debug!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
let tainted = this.get_ref().infcx.region_vars.tainted(mark, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
assert!(!r0.is_bound());
return r0;
}
// Otherwise, the variable must be associated with at
// least one of the variables representing bound regions
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
for (a_br, a_r) in a_map.iter() {
if tainted.iter().any(|x| x == a_r)
|
}
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("region {:?} is not associated with \
any bound region from A!",
r0).as_slice())
}
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
}
|
{
debug!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
r0, *a_br, tainted);
return ty::ReLateBound(new_scope, *a_br);
}
|
conditional_block
|
lub.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::then;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::region_inference::RegionMark;
use std::collections::HashMap;
use syntax::ast::{Many, Once, NodeId};
use syntax::ast::{NormalFn, UnsafeFn};
use syntax::ast::{Onceness, FnStyle};
use syntax::ast::{MutMutable, MutImmutable};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype
impl<'f> Lub<'f> {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields<'f> { let Lub(ref v) = *self; v }
}
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }
fn sub<'a>(&'a self) -> Sub<'a> { Sub(self.get_ref().clone()) }
fn lub<'a>(&'a self) -> Lub<'a> { Lub(self.get_ref().clone()) }
fn glb<'a>(&'a self) -> Glb<'a> { Glb(self.get_ref().clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability)
}
let m = a.mutbl;
match m {
MutImmutable => {
self.tys(a.ty, b.ty).and_then(|t| Ok(ty::mt {ty: t, mutbl: m}) )
}
MutMutable => {
self.get_ref().infcx.try(|| {
eq_tys(self, a.ty, b.ty).then(|| {
Ok(ty::mt {ty: a.ty, mutbl: m})
})
}).or_else(|e| Err(e))
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.glb().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(UnsafeFn, _) | (_, UnsafeFn) => Ok(UnsafeFn),
(NormalFn, NormalFn) => Ok(NormalFn),
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Once, _) | (_, Once) => Ok(Once),
(Many, Many) => Ok(Many)
}
}
fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the LUB (mutual supertype) is the intersection.
Ok(a.intersection(b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.glb().regions(a, b)
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.get_ref().infcx.tcx),
b.repr(self.get_ref().infcx.tcx));
Ok(self.get_ref().infcx.region_vars.lub_regions(Subtype(self.trace()), a, b))
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.get_ref().infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let (b_with_fresh, _) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.get_ref().infcx.tcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
self.get_ref().infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, mark, new_vars.as_slice(),
sig0.binder_id, &a_map, r));
return Ok(sig1);
fn generalize_region(this: &Lub,
|
new_scope: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
r0: ty::Region)
-> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
debug!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
let tainted = this.get_ref().infcx.region_vars.tainted(mark, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
assert!(!r0.is_bound());
return r0;
}
// Otherwise, the variable must be associated with at
// least one of the variables representing bound regions
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
for (a_br, a_r) in a_map.iter() {
if tainted.iter().any(|x| x == a_r) {
debug!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
r0, *a_br, tainted);
return ty::ReLateBound(new_scope, *a_br);
}
}
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("region {:?} is not associated with \
any bound region from A!",
r0).as_slice())
}
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
}
|
mark: RegionMark,
new_vars: &[RegionVid],
|
random_line_split
|
lub.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::then;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::region_inference::RegionMark;
use std::collections::HashMap;
use syntax::ast::{Many, Once, NodeId};
use syntax::ast::{NormalFn, UnsafeFn};
use syntax::ast::{Onceness, FnStyle};
use syntax::ast::{MutMutable, MutImmutable};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype
impl<'f> Lub<'f> {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields<'f> { let Lub(ref v) = *self; v }
}
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }
fn sub<'a>(&'a self) -> Sub<'a> { Sub(self.get_ref().clone()) }
fn lub<'a>(&'a self) -> Lub<'a> { Lub(self.get_ref().clone()) }
fn glb<'a>(&'a self) -> Glb<'a> { Glb(self.get_ref().clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability)
}
let m = a.mutbl;
match m {
MutImmutable => {
self.tys(a.ty, b.ty).and_then(|t| Ok(ty::mt {ty: t, mutbl: m}) )
}
MutMutable => {
self.get_ref().infcx.try(|| {
eq_tys(self, a.ty, b.ty).then(|| {
Ok(ty::mt {ty: a.ty, mutbl: m})
})
}).or_else(|e| Err(e))
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.glb().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(UnsafeFn, _) | (_, UnsafeFn) => Ok(UnsafeFn),
(NormalFn, NormalFn) => Ok(NormalFn),
}
}
fn
|
(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Once, _) | (_, Once) => Ok(Once),
(Many, Many) => Ok(Many)
}
}
fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the LUB (mutual supertype) is the intersection.
Ok(a.intersection(b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.glb().regions(a, b)
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.get_ref().infcx.tcx),
b.repr(self.get_ref().infcx.tcx));
Ok(self.get_ref().infcx.region_vars.lub_regions(Subtype(self.trace()), a, b))
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.get_ref().infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let (b_with_fresh, _) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.get_ref().infcx.tcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
self.get_ref().infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, mark, new_vars.as_slice(),
sig0.binder_id, &a_map, r));
return Ok(sig1);
fn generalize_region(this: &Lub,
mark: RegionMark,
new_vars: &[RegionVid],
new_scope: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
r0: ty::Region)
-> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
debug!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
let tainted = this.get_ref().infcx.region_vars.tainted(mark, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
assert!(!r0.is_bound());
return r0;
}
// Otherwise, the variable must be associated with at
// least one of the variables representing bound regions
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
for (a_br, a_r) in a_map.iter() {
if tainted.iter().any(|x| x == a_r) {
debug!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
r0, *a_br, tainted);
return ty::ReLateBound(new_scope, *a_br);
}
}
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("region {:?} is not associated with \
any bound region from A!",
r0).as_slice())
}
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
}
|
oncenesses
|
identifier_name
|
lub.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::then;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::region_inference::RegionMark;
use std::collections::HashMap;
use syntax::ast::{Many, Once, NodeId};
use syntax::ast::{NormalFn, UnsafeFn};
use syntax::ast::{Onceness, FnStyle};
use syntax::ast::{MutMutable, MutImmutable};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype
impl<'f> Lub<'f> {
pub fn get_ref<'a>(&'a self) -> &'a CombineFields<'f> { let Lub(ref v) = *self; v }
}
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }
fn sub<'a>(&'a self) -> Sub<'a> { Sub(self.get_ref().clone()) }
fn lub<'a>(&'a self) -> Lub<'a> { Lub(self.get_ref().clone()) }
fn glb<'a>(&'a self) -> Glb<'a> { Glb(self.get_ref().clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.get_ref().infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability)
}
let m = a.mutbl;
match m {
MutImmutable => {
self.tys(a.ty, b.ty).and_then(|t| Ok(ty::mt {ty: t, mutbl: m}) )
}
MutMutable => {
self.get_ref().infcx.try(|| {
eq_tys(self, a.ty, b.ty).then(|| {
Ok(ty::mt {ty: a.ty, mutbl: m})
})
}).or_else(|e| Err(e))
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.glb().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(UnsafeFn, _) | (_, UnsafeFn) => Ok(UnsafeFn),
(NormalFn, NormalFn) => Ok(NormalFn),
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness>
|
fn bounds(&self, a: BuiltinBounds, b: BuiltinBounds) -> cres<BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the LUB (mutual supertype) is the intersection.
Ok(a.intersection(b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.glb().regions(a, b)
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.get_ref().infcx.tcx),
b.repr(self.get_ref().infcx.tcx));
Ok(self.get_ref().infcx.region_vars.lub_regions(Subtype(self.trace()), a, b))
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.get_ref().infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let (b_with_fresh, _) =
self.get_ref().infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.get_ref().infcx.tcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
self.get_ref().infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.get_ref().infcx.tcx,
&sig0,
|r| generalize_region(self, mark, new_vars.as_slice(),
sig0.binder_id, &a_map, r));
return Ok(sig1);
fn generalize_region(this: &Lub,
mark: RegionMark,
new_vars: &[RegionVid],
new_scope: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
r0: ty::Region)
-> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
debug!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
let tainted = this.get_ref().infcx.region_vars.tainted(mark, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
assert!(!r0.is_bound());
return r0;
}
// Otherwise, the variable must be associated with at
// least one of the variables representing bound regions
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
for (a_br, a_r) in a_map.iter() {
if tainted.iter().any(|x| x == a_r) {
debug!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
r0, *a_br, tainted);
return ty::ReLateBound(new_scope, *a_br);
}
}
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("region {:?} is not associated with \
any bound region from A!",
r0).as_slice())
}
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
}
|
{
match (a, b) {
(Once, _) | (_, Once) => Ok(Once),
(Many, Many) => Ok(Many)
}
}
|
identifier_body
|
mocking.rs
|
use crate::mock_store::{MockLayer, MockStore};
use std::any::{Any, TypeId};
use std::marker::PhantomData;
use std::mem::transmute;
/// Trait for setting up mocks
///
/// The trait is implemented for all functions, so its methods can be called on any function.
///
/// Note: methods have an effect only if called on functions [annotated as mockable](https://docs.rs/mocktopus_macros).
pub trait Mockable<T, O> {
/// Core function for setting up mocks
///
/// Always consider using [mock_safe](#tymethod.mock_safe) or [MockContext](struct.MockContext.html).
///
/// The passed closure is called whenever the mocked function is called. Depending on the variant of the returned
/// [MockResult](enum.MockResult.html), the mocked function either continues to run or returns immediately.
/// In case of continuation, the function arguments can be modified or replaced.
///
/// The mock closure is saved in
/// [thread-local static storage](https://doc.rust-lang.org/std/macro.thread_local.html),
/// so it has an effect only in the thread where it was set.
/// Each Rust test is executed in a separate thread, so mocks do not leak between them.
/// # Safety
/// It is up to the user to make sure that the closure is valid long enough to serve all calls to the mocked
/// function. If the mock closure uses any non-static values or references, it will silently become invalid at
/// some point during the host thread's lifetime.
///
/// ```
/// #[mockable]
/// fn get_string(context: &Context) -> &String {
/// context.get_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// let mocked = "mocked".to_string();
/// unsafe {
/// get_string.mock_raw(|_| MockResult::Return(&mocked));
/// }
///
/// assert_eq!("mocked", get_string(&Context::default()));
/// }
/// ```
unsafe fn mock_raw<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M);
/// A safe variant of [mock_raw](#tymethod.mock_raw) for static closures
///
/// The safety is guaranteed by forcing passed closure to be static.
|
///
/// ```
/// #[mockable]
/// fn get_string() -> String {
/// "not mocked".to_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// get_string.mock_safe(|| MockResult::Return("mocked".to_string()));
///
/// assert_eq!("mocked", get_string());
/// }
/// ```
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M);
/// Stop mocking this function.
///
/// All future invocations will be forwarded to the real implementation.
fn clear_mock(&self);
#[doc(hidden)]
/// Called before every execution of a mockable function. Checks if mock is set and if it is, calls it.
fn call_mock(&self, input: T) -> MockResult<T, O>;
#[doc(hidden)]
/// Returns a unique ID of the function, which is used to set and get its mock.
unsafe fn get_mock_id(&self) -> TypeId;
}
/// Controls mocked function behavior when returned from [mock closure](trait.Mockable.html)
pub enum MockResult<T, O> {
/// Function runs normally as if it was called with given arguments.
/// The arguments are passed inside enum variant as a tuple.
Continue(T),
/// Function returns immediately with a given value. The returned value is passed inside enum variant.
Return(O),
}
thread_local! {
static MOCK_STORE: MockStore = MockStore::default()
}
/// Clear all mocks in the ThreadLocal; only necessary if tests share threads
pub fn clear_mocks() {
MOCK_STORE.with(|mock_store| mock_store.clear())
}
impl<T, O, F: FnOnce<T, Output = O>> Mockable<T, O> for F {
unsafe fn mock_raw<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M) {
let id = self.get_mock_id();
let boxed = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let static_boxed: Box<dyn FnMut<T, Output = MockResult<T, O>> + 'static> = transmute(boxed);
MOCK_STORE.with(|mock_store| mock_store.add_to_thread_layer(id, static_boxed))
}
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M) {
unsafe { self.mock_raw(mock) }
}
fn clear_mock(&self) {
let id = unsafe { self.get_mock_id() };
MOCK_STORE.with(|mock_store| mock_store.clear_id(id))
}
fn call_mock(&self, input: T) -> MockResult<T, O> {
unsafe {
let id = self.get_mock_id();
MOCK_STORE.with(|mock_store| mock_store.call(id, input))
}
}
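// Each monomorphization of this blanket impl defines a brand-new closure
// type below, so the TypeId of `|| ()` is unique per mocked function and
// stable across calls, which is what makes it usable as the mock-store key.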
unsafe fn get_mock_id(&self) -> TypeId {
(|| ()).type_id()
}
}
/// `MockContext` allows for safe capture of local variables.
///
/// It does this by forcing only mocking the actual function while in the body
/// of [run](#tymethod.run).
///
/// # Examples
///
/// Simple function replacement:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn f() -> i32 {
/// 0
/// }
///
/// MockContext::new()
/// .mock_safe(f, || MockResult::Return(1))
/// .run(|| {
/// assert_eq!(f(), 1);
/// });
/// ```
///
/// Using local variables:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn as_str(s: &String) -> &str {
/// &s
/// }
///
/// let mut count = 0;
/// MockContext::new()
/// .mock_safe(as_str, |s| { count += 1; MockResult::Return(&s) })
/// .run(|| {
/// assert_eq!(as_str(&"abc".to_string()), "abc");
/// });
/// assert_eq!(count, 1);
/// ```
#[derive(Default)]
pub struct MockContext<'a> {
mock_layer: MockLayer,
phantom_lifetime: PhantomData<&'a ()>,
}
impl<'a> MockContext<'a> {
/// Create a new MockContext object.
pub fn new() -> Self {
Self::default()
}
/// Set up a function to be mocked.
///
/// This function doesn't actually mock the function. It registers it as a
/// function that will be mocked when [`run`](#method.run) is called.
pub fn mock_safe<I, O, F, M>(self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>> + 'a,
{
unsafe { self.mock_raw(mockable, mock) }
}
/// Set up a function to be mocked.
///
/// This is an unsafe version of [`mock_safe`](#method.mock_safe),
/// without lifetime constraint on mock
pub unsafe fn mock_raw<I, O, F, M>(mut self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>>,
{
let mock_box = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let mock_box_static: Box<dyn FnMut<I, Output = MockResult<I, O>> + 'static> =
std::mem::transmute(mock_box);
self.mock_layer.add(mockable.get_mock_id(), mock_box_static);
self
}
/// Run the function while mocking all the functions.
///
/// This function will mock all functions registered for mocking, run the
/// function passed in, then deregister those functions. It does this in a
/// panic-safe way. Note that functions are only mocked in the current
/// thread and other threads may invoke the real implementations.
///
/// Register a function for mocking with [`mock_safe`](#method.mock_safe).
pub fn run<T, F: FnOnce() -> T>(self, f: F) -> T {
MOCK_STORE.with(|mock_store| unsafe { mock_store.add_layer(self.mock_layer) });
let _mock_level_guard = MockLayerGuard;
f()
}
}
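// MockLayerGuard is a drop guard: `run` pushes the mock layer, and Drop pops
// it even if the user's closure panics, so an unwinding test cannot leave
// its mocks installed on the thread.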
struct MockLayerGuard;
impl Drop for MockLayerGuard {
fn drop(&mut self) {
MOCK_STORE.with(|mock_store| unsafe { mock_store.remove_layer() });
}
}
|
/// This eliminates the problem of using non-static values, which may not live long enough.
|
random_line_split
|
mocking.rs
|
use crate::mock_store::{MockLayer, MockStore};
use std::any::{Any, TypeId};
use std::marker::PhantomData;
use std::mem::transmute;
/// Trait for setting up mocks
///
/// The trait is implemented for all functions, so its methods can be called on any function.
///
/// Note: methods have an effect only if called on functions [annotated as mockable](https://docs.rs/mocktopus_macros).
pub trait Mockable<T, O> {
/// Core function for setting up mocks
///
/// Always consider using [mock_safe](#tymethod.mock_safe) or [MockContext](struct.MockContext.html).
///
/// The passed closure is called whenever the mocked function is called. Depending on the variant of the returned
/// [MockResult](enum.MockResult.html), the mocked function either continues to run or returns immediately.
/// In case of continuation, the function arguments can be modified or replaced.
///
/// The mock closure is saved in
/// [thread-local static storage](https://doc.rust-lang.org/std/macro.thread_local.html),
/// so it has an effect only in the thread where it was set.
/// Each Rust test is executed in a separate thread, so mocks do not leak between them.
/// # Safety
/// It is up to the user to make sure that the closure is valid long enough to serve all calls to the mocked
/// function. If the mock closure uses any non-static values or references, it will silently become invalid at
/// some point during the host thread's lifetime.
///
/// ```
/// #[mockable]
/// fn get_string(context: &Context) -> &String {
/// context.get_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// let mocked = "mocked".to_string();
/// unsafe {
/// get_string.mock_raw(|_| MockResult::Return(&mocked));
/// }
///
/// assert_eq!("mocked", get_string(&Context::default()));
/// }
/// ```
unsafe fn mock_raw<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M);
/// A safe variant of [mock_raw](#tymethod.mock_raw) for static closures
///
/// The safety is guaranteed by forcing passed closure to be static.
/// This eliminates the problem of using non-static values, which may not live long enough.
///
/// ```
/// #[mockable]
/// fn get_string() -> String {
/// "not mocked".to_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// get_string.mock_safe(|| MockResult::Return("mocked".to_string()));
///
/// assert_eq!("mocked", get_string());
/// }
/// ```
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M);
/// Stop mocking this function.
///
/// All future invocations will be forwarded to the real implementation.
fn clear_mock(&self);
#[doc(hidden)]
/// Called before every execution of a mockable function. Checks if mock is set and if it is, calls it.
fn call_mock(&self, input: T) -> MockResult<T, O>;
#[doc(hidden)]
/// Returns a unique ID of the function, which is used to set and get its mock.
unsafe fn get_mock_id(&self) -> TypeId;
}
/// Controls mocked function behavior when returned from [mock closure](trait.Mockable.html)
pub enum MockResult<T, O> {
/// Function runs normally as if it was called with given arguments.
/// The arguments are passed inside enum variant as a tuple.
Continue(T),
/// Function returns immediately with a given value. The returned value is passed inside enum variant.
Return(O),
}
thread_local! {
static MOCK_STORE: MockStore = MockStore::default()
}
/// Clear all mocks in the ThreadLocal; only necessary if tests share threads
pub fn clear_mocks() {
MOCK_STORE.with(|mock_store| mock_store.clear())
}
impl<T, O, F: FnOnce<T, Output = O>> Mockable<T, O> for F {
unsafe fn mock_raw<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M)
|
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M) {
unsafe { self.mock_raw(mock) }
}
fn clear_mock(&self) {
let id = unsafe { self.get_mock_id() };
MOCK_STORE.with(|mock_store| mock_store.clear_id(id))
}
fn call_mock(&self, input: T) -> MockResult<T, O> {
unsafe {
let id = self.get_mock_id();
MOCK_STORE.with(|mock_store| mock_store.call(id, input))
}
}
unsafe fn get_mock_id(&self) -> TypeId {
(|| ()).type_id()
}
}
/// `MockContext` allows for safe capture of local variables.
///
/// It does this by forcing only mocking the actual function while in the body
/// of [run](#tymethod.run).
///
/// # Examples
///
/// Simple function replacement:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn f() -> i32 {
/// 0
/// }
///
/// MockContext::new()
/// .mock_safe(f, || MockResult::Return(1))
/// .run(|| {
/// assert_eq!(f(), 1);
/// });
/// ```
///
/// Using local variables:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn as_str(s: &String) -> &str {
/// &s
/// }
///
/// let mut count = 0;
/// MockContext::new()
/// .mock_safe(as_str, |s| { count += 1; MockResult::Return(&s) })
/// .run(|| {
/// assert_eq!(as_str(&"abc".to_string()), "abc");
/// });
/// assert_eq!(count, 1);
/// ```
#[derive(Default)]
pub struct MockContext<'a> {
mock_layer: MockLayer,
phantom_lifetime: PhantomData<&'a ()>,
}
impl<'a> MockContext<'a> {
/// Create a new MockContext object.
pub fn new() -> Self {
Self::default()
}
/// Set up a function to be mocked.
///
/// This function doesn't actually mock the function. It registers it as a
/// function that will be mocked when [`run`](#method.run) is called.
pub fn mock_safe<I, O, F, M>(self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>> + 'a,
{
unsafe { self.mock_raw(mockable, mock) }
}
/// Set up a function to be mocked.
///
/// This is an unsafe version of [`mock_safe`](#method.mock_safe),
/// without lifetime constraint on mock
pub unsafe fn mock_raw<I, O, F, M>(mut self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>>,
{
let mock_box = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let mock_box_static: Box<dyn FnMut<I, Output = MockResult<I, O>> + 'static> =
std::mem::transmute(mock_box);
self.mock_layer.add(mockable.get_mock_id(), mock_box_static);
self
}
/// Run the function while mocking all the functions.
///
/// This function will mock all functions registered for mocking, run the
/// function passed in, then deregister those functions. It does this in a
/// panic-safe way. Note that functions are only mocked in the current
/// thread and other threads may invoke the real implementations.
///
/// Register a function for mocking with [`mock_safe`](#method.mock_safe).
pub fn run<T, F: FnOnce() -> T>(self, f: F) -> T {
MOCK_STORE.with(|mock_store| unsafe { mock_store.add_layer(self.mock_layer) });
let _mock_level_guard = MockLayerGuard;
f()
}
}
struct MockLayerGuard;
impl Drop for MockLayerGuard {
fn drop(&mut self) {
MOCK_STORE.with(|mock_store| unsafe { mock_store.remove_layer() });
}
}
|
{
let id = self.get_mock_id();
let boxed = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let static_boxed: Box<dyn FnMut<T, Output = MockResult<T, O>> + 'static> = transmute(boxed);
MOCK_STORE.with(|mock_store| mock_store.add_to_thread_layer(id, static_boxed))
}
|
identifier_body
|
mocking.rs
|
use crate::mock_store::{MockLayer, MockStore};
use std::any::{Any, TypeId};
use std::marker::PhantomData;
use std::mem::transmute;
/// Trait for setting up mocks
///
/// The trait is implemented for all functions, so its methods can be called on any function.
///
/// Note: methods have an effect only if called on functions [annotated as mockable](https://docs.rs/mocktopus_macros).
pub trait Mockable<T, O> {
/// Core function for setting up mocks
///
/// Always consider using [mock_safe](#tymethod.mock_safe) or [MockContext](struct.MockContext.html).
///
/// The passed closure is called whenever the mocked function is called. Depending on the variant of the returned
/// [MockResult](enum.MockResult.html), the mocked function either continues to run or returns immediately.
/// In case of continuation, the function arguments can be modified or replaced.
///
/// The mock closure is saved in
/// [thread-local static storage](https://doc.rust-lang.org/std/macro.thread_local.html),
/// so it has an effect only in the thread where it was set.
/// Each Rust test is executed in a separate thread, so mocks do not leak between them.
/// # Safety
/// It is up to the user to make sure that the closure is valid long enough to serve all calls to the mocked
/// function. If the mock closure uses any non-static values or references, it will silently become invalid at
/// some point during the host thread's lifetime.
///
/// ```
/// #[mockable]
/// fn get_string(context: &Context) -> &String {
/// context.get_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// let mocked = "mocked".to_string();
/// unsafe {
/// get_string.mock_raw(|_| MockResult::Return(&mocked));
/// }
///
/// assert_eq!("mocked", get_string(&Context::default()));
/// }
/// ```
unsafe fn mock_raw<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M);
/// A safe variant of [mock_raw](#tymethod.mock_raw) for static closures
///
/// The safety is guaranteed by forcing passed closure to be static.
/// This eliminates the problem of using non-static values, which may not live long enough.
///
/// ```
/// #[mockable]
/// fn get_string() -> String {
/// "not mocked".to_string()
/// }
///
/// #[test]
/// fn get_string_test() {
/// get_string.mock_safe(|| MockResult::Return("mocked".to_string()));
///
/// assert_eq!("mocked", get_string());
/// }
/// ```
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M);
/// Stop mocking this function.
///
/// All future invocations will be forwarded to the real implementation.
fn clear_mock(&self);
#[doc(hidden)]
/// Called before every execution of a mockable function. Checks if mock is set and if it is, calls it.
fn call_mock(&self, input: T) -> MockResult<T, O>;
#[doc(hidden)]
/// Returns a unique ID of the function, which is used to set and get its mock.
unsafe fn get_mock_id(&self) -> TypeId;
}
/// Controls mocked function behavior when returned from [mock closure](trait.Mockable.html)
pub enum MockResult<T, O> {
/// Function runs normally as if it was called with given arguments.
/// The arguments are passed inside enum variant as a tuple.
Continue(T),
/// Function returns immediately with a given value. The returned value is passed inside enum variant.
Return(O),
}
thread_local! {
static MOCK_STORE: MockStore = MockStore::default()
}
/// Clear all mocks in the ThreadLocal; only necessary if tests share threads
pub fn clear_mocks() {
MOCK_STORE.with(|mock_store| mock_store.clear())
}
impl<T, O, F: FnOnce<T, Output = O>> Mockable<T, O> for F {
unsafe fn
|
<M: FnMut<T, Output = MockResult<T, O>>>(&self, mock: M) {
let id = self.get_mock_id();
let boxed = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let static_boxed: Box<dyn FnMut<T, Output = MockResult<T, O>> +'static> = transmute(boxed);
MOCK_STORE.with(|mock_store| mock_store.add_to_thread_layer(id, static_boxed))
}
fn mock_safe<M: FnMut<T, Output = MockResult<T, O>> + 'static>(&self, mock: M) {
unsafe { self.mock_raw(mock) }
}
fn clear_mock(&self) {
let id = unsafe { self.get_mock_id() };
MOCK_STORE.with(|mock_store| mock_store.clear_id(id))
}
fn call_mock(&self, input: T) -> MockResult<T, O> {
unsafe {
let id = self.get_mock_id();
MOCK_STORE.with(|mock_store| mock_store.call(id, input))
}
}
unsafe fn get_mock_id(&self) -> TypeId {
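// Each instantiation of this generic impl defines a distinct closure type,
// so the closure's TypeId uniquely identifies the mocked function F.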
(|| ()).type_id()
}
}
/// `MockContext` allows for safe capture of local variables.
///
/// It does this by applying the mocks only while the body of
/// [run](#method.run) is executing.
///
/// # Examples
///
/// Simple function replacement:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn f() -> i32 {
/// 0
/// }
///
/// MockContext::new()
/// .mock_safe(f, || MockResult::Return(1))
/// .run(|| {
/// assert_eq!(f(), 1);
/// });
/// ```
///
/// Using local variables:
///
/// ```
/// use mocktopus::macros::mockable;
/// use mocktopus::mocking::{MockContext, MockResult};
///
/// #[mockable]
/// fn as_str(s: &String) -> &str {
/// &s
/// }
///
/// let mut count = 0;
/// MockContext::new()
/// .mock_safe(as_str, |s| { count += 1; MockResult::Return(&s) })
/// .run(|| {
/// assert_eq!(as_str(&"abc".to_string()), "abc");
/// });
/// assert_eq!(count, 1);
/// ```
#[derive(Default)]
pub struct MockContext<'a> {
mock_layer: MockLayer,
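// Ties the context to 'a so registered mock closures may borrow locals.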
phantom_lifetime: PhantomData<&'a ()>,
}
impl<'a> MockContext<'a> {
/// Create a new MockContext object.
pub fn new() -> Self {
Self::default()
}
/// Set up a function to be mocked.
///
/// This method doesn't actually mock the function yet. It registers the
/// function to be mocked when [`run`](#method.run) is called.
pub fn mock_safe<I, O, F, M>(self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>> + 'a,
{
unsafe { self.mock_raw(mockable, mock) }
}
/// Set up a function to be mocked.
///
/// This is an unsafe version of [`mock_safe`](#method.mock_safe),
/// without the lifetime constraint on the mock closure.
pub unsafe fn mock_raw<I, O, F, M>(mut self, mockable: F, mock: M) -> Self
where
F: Mockable<I, O>,
M: FnMut<I, Output = MockResult<I, O>>,
{
let mock_box = Box::new(mock) as Box<dyn FnMut<_, Output = _>>;
let mock_box_static: Box<dyn FnMut<I, Output = MockResult<I, O>> + 'static> =
std::mem::transmute(mock_box);
self.mock_layer.add(mockable.get_mock_id(), mock_box_static);
self
}
/// Run the function while mocking all the functions.
///
/// This function will mock all functions registered for mocking, run the
/// function passed in, then deregister those functions. It does this in a
/// panic-safe way. Note that functions are only mocked in the current
/// thread and other threads may invoke the real implementations.
///
/// Register a function for mocking with [`mock_safe`](#method.mock_safe).
pub fn run<T, F: FnOnce() -> T>(self, f: F) -> T {
MOCK_STORE.with(|mock_store| unsafe { mock_store.add_layer(self.mock_layer) });
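// The guard pops the mock layer in its Drop impl, so the layer is removed
// even if `f` panics (RAII-based panic safety).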
let _mock_level_guard = MockLayerGuard;
f()
}
}
struct MockLayerGuard;
impl<'a> Drop for MockLayerGuard {
fn drop(&mut self) {
MOCK_STORE.with(|mock_store| unsafe { mock_store.remove_layer() });
}
}
|
mock_raw
|
identifier_name
|
bench.rs
|
#![feature(test)]
extern crate test;
use std::mem::replace;
use test::Bencher;
// bench: find the `BENCH_SIZE` first terms of the fibonacci sequence
static BENCH_SIZE: usize = 20;
// recursive fibonacci
fn fibonacci(n: usize) -> u32 {
if n < 2
|
else {
fibonacci(n - 1) + fibonacci(n - 2)
}
}
// iterative fibonacci
struct Fibonacci {
curr: u32,
next: u32,
}
impl Iterator for Fibonacci {
type Item = u32;
fn next(&mut self) -> Option<u32> {
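// Step (curr, next) -> (next, curr + next) and yield the old curr.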
let new_next = self.curr + self.next;
let new_curr = replace(&mut self.next, new_next);
Some(replace(&mut self.curr, new_curr))
}
}
fn fibonacci_sequence() -> Fibonacci {
Fibonacci { curr: 1, next: 1 }
}
// function to benchmark must be annotated with `#[bench]`
#[bench]
fn recursive_fibonacci(b: &mut Bencher) {
// exact code to benchmark must be passed as a closure to the iter
// method of Bencher
b.iter(|| {
(0..BENCH_SIZE).map(fibonacci).collect::<Vec<u32>>()
})
}
#[bench]
fn iterative_fibonacci(b: &mut Bencher) {
b.iter(|| {
fibonacci_sequence().take(BENCH_SIZE).collect::<Vec<u32>>()
})
}
|
{
1
}
|
conditional_block
|
bench.rs
|
#![feature(test)]
extern crate test;
use std::mem::replace;
use test::Bencher;
// bench: find the `BENCH_SIZE` first terms of the fibonacci sequence
static BENCH_SIZE: usize = 20;
// recursive fibonacci
fn fibonacci(n: usize) -> u32 {
if n < 2 {
1
} else {
fibonacci(n - 1) + fibonacci(n - 2)
}
}
// iterative fibonacci
struct Fibonacci {
curr: u32,
next: u32,
}
impl Iterator for Fibonacci {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let new_next = self.curr + self.next;
let new_curr = replace(&mut self.next, new_next);
Some(replace(&mut self.curr, new_curr))
}
}
fn fibonacci_sequence() -> Fibonacci {
Fibonacci { curr: 1, next: 1 }
}
// function to benchmark must be annotated with `#[bench]`
#[bench]
fn recursive_fibonacci(b: &mut Bencher) {
// exact code to benchmark must be passed as a closure to the iter
// method of Bencher
b.iter(|| {
(0..BENCH_SIZE).map(fibonacci).collect::<Vec<u32>>()
})
}
#[bench]
fn iterative_fibonacci(b: &mut Bencher) {
b.iter(|| {
fibonacci_sequence().take(BENCH_SIZE).collect::<Vec<u32>>()
|
})
}
|
random_line_split
|
|
bench.rs
|
#![feature(test)]
extern crate test;
use std::mem::replace;
use test::Bencher;
// bench: find the `BENCH_SIZE` first terms of the fibonacci sequence
static BENCH_SIZE: usize = 20;
// recursive fibonacci
fn fibonacci(n: usize) -> u32 {
if n < 2 {
1
} else {
fibonacci(n - 1) + fibonacci(n - 2)
}
}
// iterative fibonacci
struct Fibonacci {
curr: u32,
next: u32,
}
impl Iterator for Fibonacci {
type Item = u32;
fn next(&mut self) -> Option<u32>
|
}
fn fibonacci_sequence() -> Fibonacci {
Fibonacci { curr: 1, next: 1 }
}
// function to benchmark must be annotated with `#[bench]`
#[bench]
fn recursive_fibonacci(b: &mut Bencher) {
// exact code to benchmark must be passed as a closure to the iter
// method of Bencher
b.iter(|| {
(0..BENCH_SIZE).map(fibonacci).collect::<Vec<u32>>()
})
}
#[bench]
fn iterative_fibonacci(b: &mut Bencher) {
b.iter(|| {
fibonacci_sequence().take(BENCH_SIZE).collect::<Vec<u32>>()
})
}
|
{
let new_next = self.curr + self.next;
let new_curr = replace(&mut self.next, new_next);
Some(replace(&mut self.curr, new_curr))
}
|
identifier_body
|
bench.rs
|
#![feature(test)]
extern crate test;
use std::mem::replace;
use test::Bencher;
// bench: find the `BENCH_SIZE` first terms of the fibonacci sequence
static BENCH_SIZE: usize = 20;
// recursive fibonacci
fn
|
(n: usize) -> u32 {
if n < 2 {
1
} else {
fibonacci(n - 1) + fibonacci(n - 2)
}
}
// iterative fibonacci
struct Fibonacci {
curr: u32,
next: u32,
}
impl Iterator for Fibonacci {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let new_next = self.curr + self.next;
let new_curr = replace(&mut self.next, new_next);
Some(replace(&mut self.curr, new_curr))
}
}
fn fibonacci_sequence() -> Fibonacci {
Fibonacci { curr: 1, next: 1 }
}
// function to benchmark must be annotated with `#[bench]`
#[bench]
fn recursive_fibonacci(b: &mut Bencher) {
// exact code to benchmark must be passed as a closure to the iter
// method of Bencher
b.iter(|| {
(0..BENCH_SIZE).map(fibonacci).collect::<Vec<u32>>()
})
}
#[bench]
fn iterative_fibonacci(b: &mut Bencher) {
b.iter(|| {
fibonacci_sequence().take(BENCH_SIZE).collect::<Vec<u32>>()
})
}
|
fibonacci
|
identifier_name
|
series.rs
|
use std::ops::{Add, Div, Mul};
pub fn sum_of_i<T, R, Q>(n: T) -> Q::Output
where
T: Add<Output = R> + Copy + Mul<R, Output = Q> + From<u8>,
Q: Div + From<u8>,
{
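// Closed form for 1 + 2 + ... + n, i.e. n * (n + 1) / 2.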
((n) * (n + T::from(1_u8))) / Q::from(2_u8)
}
pub fn sum_of_squares_i<T>(n: T) -> T
where
T: Add<Output = T> + Copy + Mul<Output = T> + From<u8> + Div<Output = T>,
{
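// Closed form for 1^2 + 2^2 + ... + n^2, i.e. n * (n + 1) * (2n + 1) / 6.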
n * (n + T::from(1_u8)) * ((T::from(2) * n) + T::from(1)) / T::from(6)
}
#[cfg(test)]
mod tests {
use super::*;
fn naive_sum_of_i(n: u64) -> u64 {
let v = 1..=n;
v.sum()
}
fn naive_sum_of_squares(n: u64) -> u64 {
let v = 1..=n;
v.map(|v| v * v).sum()
}
#[test]
fn sum_of_i_base_cases() {
assert_eq!(sum_of_i(1_u64), 1);
assert_eq!(sum_of_i(2_u64), 3);
assert_eq!(sum_of_i(3_u64), 6);
}
#[test]
fn sum_of_i_is_correct_for_some_mid_size_numbers()
|
#[test]
fn sum_of_squares_base_cases() {
assert_eq!(sum_of_squares_i(1), 1);
assert_eq!(sum_of_squares_i(0), 0);
assert_eq!(sum_of_squares_i(2), 5);
assert_eq!(sum_of_squares_i(5), 55);
}
#[test]
fn sum_of_squares_is_correct_for_some_mid_sized_numbers() {
assert_eq!(sum_of_squares_i(100), naive_sum_of_squares(100));
assert_eq!(sum_of_squares_i(200), naive_sum_of_squares(200));
}
}
|
{
assert_eq!(sum_of_i(100_u64), naive_sum_of_i(100));
assert_eq!(sum_of_i(200_u64), naive_sum_of_i(200));
}
|
identifier_body
|
series.rs
|
use std::ops::{Add, Div, Mul};
pub fn sum_of_i<T, R, Q>(n: T) -> Q::Output
where
T: Add<Output = R> + Copy + Mul<R, Output = Q> + From<u8>,
Q: Div + From<u8>,
{
((n) * (n + T::from(1_u8))) / Q::from(2_u8)
}
pub fn sum_of_squares_i<T>(n: T) -> T
where
T: Add<Output = T> + Copy + Mul<Output = T> + From<u8> + Div<Output = T>,
{
n * (n + T::from(1_u8)) * ((T::from(2) * n) + T::from(1)) / T::from(6)
}
#[cfg(test)]
mod tests {
use super::*;
fn naive_sum_of_i(n: u64) -> u64 {
let v = 1..=n;
v.sum()
}
fn naive_sum_of_squares(n: u64) -> u64 {
let v = 1..=n;
v.map(|v| v * v).sum()
}
#[test]
fn sum_of_i_base_cases() {
assert_eq!(sum_of_i(1_u64), 1);
assert_eq!(sum_of_i(2_u64), 3);
assert_eq!(sum_of_i(3_u64), 6);
}
#[test]
fn sum_of_i_is_correct_for_some_mid_size_numbers() {
assert_eq!(sum_of_i(100_u64), naive_sum_of_i(100));
assert_eq!(sum_of_i(200_u64), naive_sum_of_i(200));
}
#[test]
fn sum_of_squares_base_cases() {
assert_eq!(sum_of_squares_i(1), 1);
assert_eq!(sum_of_squares_i(0), 0);
assert_eq!(sum_of_squares_i(2), 5);
assert_eq!(sum_of_squares_i(5), 55);
}
#[test]
fn
|
() {
assert_eq!(sum_of_squares_i(100), naive_sum_of_squares(100));
assert_eq!(sum_of_squares_i(200), naive_sum_of_squares(200));
}
}
|
sum_of_squares_is_correct_for_some_mid_sized_numbers
|
identifier_name
|
series.rs
|
use std::ops::{Add, Div, Mul};
pub fn sum_of_i<T, R, Q>(n: T) -> Q::Output
where
T: Add<Output = R> + Copy + Mul<R, Output = Q> + From<u8>,
Q: Div + From<u8>,
{
((n) * (n + T::from(1_u8))) / Q::from(2_u8)
}
pub fn sum_of_squares_i<T>(n: T) -> T
where
T: Add<Output = T> + Copy + Mul<Output = T> + From<u8> + Div<Output = T>,
{
n * (n + T::from(1_u8)) * ((T::from(2) * n) + T::from(1)) / T::from(6)
}
#[cfg(test)]
mod tests {
use super::*;
fn naive_sum_of_i(n: u64) -> u64 {
let v = 1..=n;
v.sum()
}
fn naive_sum_of_squares(n: u64) -> u64 {
let v = 1..=n;
v.map(|v| v * v).sum()
}
#[test]
fn sum_of_i_base_cases() {
assert_eq!(sum_of_i(1_u64), 1);
assert_eq!(sum_of_i(2_u64), 3);
assert_eq!(sum_of_i(3_u64), 6);
|
fn sum_of_i_is_correct_for_some_mid_size_numbers() {
assert_eq!(sum_of_i(100_u64), naive_sum_of_i(100));
assert_eq!(sum_of_i(200_u64), naive_sum_of_i(200));
}
#[test]
fn sum_of_squares_base_cases() {
assert_eq!(sum_of_squares_i(1), 1);
assert_eq!(sum_of_squares_i(0), 0);
assert_eq!(sum_of_squares_i(2), 5);
assert_eq!(sum_of_squares_i(5), 55);
}
#[test]
fn sum_of_squares_is_correct_for_some_mid_sized_numbers() {
assert_eq!(sum_of_squares_i(100), naive_sum_of_squares(100));
assert_eq!(sum_of_squares_i(200), naive_sum_of_squares(200));
}
}
|
}
#[test]
|
random_line_split
|
eq_and_partial_eq.rs
|
// Why is this trait called PartialEq?
// The traditional mathematical definition of an equivalence relation, of which equality is one instance, imposes three requirements.
// For any values x and y:
// • If x == y is true, then y == x must be true as well.
// In other words, swapping the two sides of an equality comparison doesn’t affect the result.
//
// • If x == y and y == z, then it must be the case that x == z.
// Given any chain of values, each equal to the next, each value in the chain is directly equal to every other.
// Equality is contagious.
//
// • It must always be true that x == x.
// That last requirement might seem too obvious to be worth stating, but this is exactly where things go awry.
// Rust’s f32 and f64 are IEEE standard floating-point values.
// According to that standard, expressions like 0.0/0.0 and others with no appropriate value must produce special not-a-number values, usually referred to as NaN values.
// The standard further requires that a NaN value be treated as unequal to every other value — including itself.
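//
// A quick sanity check of the NaN corner case (my own sketch, not from the quoted text):
fn main() {
    let nan = f64::NAN;
    assert!(nan != nan); // NaN is unequal even to itself, so f64 is only PartialEq
    assert!(1.0_f64 == 1.0_f64); // ordinary values compare as expected
}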
|
// PartialEq is Eq without the x == x guarantee. (Wow, I can feel the spirit of Haskell here.)
|
// Eq is the full equality constraint.
|
random_line_split
|
intobject.rs
|
use libc::{c_char, c_int, c_long, c_ulong, c_ulonglong, size_t};
use crate::object::*;
use crate::pyport::Py_ssize_t;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyIntObject {
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub ob_ival: c_long,
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub static mut PyInt_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyInt_Check(op: *mut PyObject) -> c_int {
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_INT_SUBCLASS)
}
#[inline(always)]
pub unsafe fn PyInt_CheckExact(op: *mut PyObject) -> c_int {
let u: *mut PyTypeObject = &mut PyInt_Type;
(Py_TYPE(op) == u) as c_int
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub fn PyInt_FromString(str: *mut c_char, pend: *mut *mut c_char, base: c_int)
-> *mut PyObject;
#[cfg(py_sys_config = "Py_USING_UNICODE")]
pub fn PyInt_FromUnicode(
u: *mut crate::unicodeobject::Py_UNICODE,
length: Py_ssize_t,
base: c_int,
) -> *mut PyObject;
pub fn PyInt_FromLong(ival: c_long) -> *mut PyObject;
pub fn PyInt_FromSize_t(ival: size_t) -> *mut PyObject;
pub fn PyInt_FromSsize_t(ival: Py_ssize_t) -> *mut PyObject;
pub fn PyInt_AsLong(io: *mut PyObject) -> c_long;
pub fn PyInt_AsSsize_t(io: *mut PyObject) -> Py_ssize_t;
fn _PyInt_AsInt(io: *mut PyObject) -> c_int;
pub fn PyInt_AsUnsignedLongMask(io: *mut PyObject) -> c_ulong;
pub fn PyInt_AsUnsignedLongLongMask(io: *mut PyObject) -> c_ulonglong;
pub fn PyInt_GetMax() -> c_long;
ignore! {
fn PyOS_strtoul(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_ulong;
fn PyOS_strtol(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_long;
}
pub fn PyInt_ClearFreeList() -> c_int;
ignore! {
fn _PyInt_Format(v: *mut PyIntObject, base: c_int, newstyle: c_int) -> *mut PyObject;
fn _PyInt_FormatAdvanced(
obj: *mut PyObject,
format_spec: *mut c_char,
format_spec_len: Py_ssize_t,
) -> *mut PyObject;
}
}
pub unsafe fn PyInt_AS_LONG(io: *mut PyObject) -> c_long
|
{
(*(io as *mut PyIntObject)).ob_ival
}
|
identifier_body
|
|
intobject.rs
|
use libc::{c_char, c_int, c_long, c_ulong, c_ulonglong, size_t};
use crate::object::*;
use crate::pyport::Py_ssize_t;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyIntObject {
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub ob_ival: c_long,
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub static mut PyInt_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyInt_Check(op: *mut PyObject) -> c_int {
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_INT_SUBCLASS)
}
#[inline(always)]
pub unsafe fn PyInt_CheckExact(op: *mut PyObject) -> c_int {
let u: *mut PyTypeObject = &mut PyInt_Type;
(Py_TYPE(op) == u) as c_int
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub fn PyInt_FromString(str: *mut c_char, pend: *mut *mut c_char, base: c_int)
-> *mut PyObject;
#[cfg(py_sys_config = "Py_USING_UNICODE")]
pub fn PyInt_FromUnicode(
u: *mut crate::unicodeobject::Py_UNICODE,
length: Py_ssize_t,
base: c_int,
) -> *mut PyObject;
pub fn PyInt_FromLong(ival: c_long) -> *mut PyObject;
pub fn PyInt_FromSize_t(ival: size_t) -> *mut PyObject;
pub fn PyInt_FromSsize_t(ival: Py_ssize_t) -> *mut PyObject;
pub fn PyInt_AsLong(io: *mut PyObject) -> c_long;
pub fn PyInt_AsSsize_t(io: *mut PyObject) -> Py_ssize_t;
fn _PyInt_AsInt(io: *mut PyObject) -> c_int;
pub fn PyInt_AsUnsignedLongMask(io: *mut PyObject) -> c_ulong;
pub fn PyInt_AsUnsignedLongLongMask(io: *mut PyObject) -> c_ulonglong;
pub fn PyInt_GetMax() -> c_long;
ignore! {
fn PyOS_strtoul(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_ulong;
fn PyOS_strtol(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_long;
}
pub fn PyInt_ClearFreeList() -> c_int;
ignore! {
fn _PyInt_Format(v: *mut PyIntObject, base: c_int, newstyle: c_int) -> *mut PyObject;
fn _PyInt_FormatAdvanced(
obj: *mut PyObject,
format_spec: *mut c_char,
format_spec_len: Py_ssize_t,
) -> *mut PyObject;
}
}
pub unsafe fn
|
(io: *mut PyObject) -> c_long {
(*(io as *mut PyIntObject)).ob_ival
}
|
PyInt_AS_LONG
|
identifier_name
|
intobject.rs
|
use libc::{c_char, c_int, c_long, c_ulong, c_ulonglong, size_t};
use crate::object::*;
use crate::pyport::Py_ssize_t;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyIntObject {
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config = "Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub ob_ival: c_long,
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub static mut PyInt_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyInt_Check(op: *mut PyObject) -> c_int {
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_INT_SUBCLASS)
}
#[inline(always)]
pub unsafe fn PyInt_CheckExact(op: *mut PyObject) -> c_int {
let u: *mut PyTypeObject = &mut PyInt_Type;
(Py_TYPE(op) == u) as c_int
}
#[cfg_attr(windows, link(name = "pythonXY"))]
extern "C" {
pub fn PyInt_FromString(str: *mut c_char, pend: *mut *mut c_char, base: c_int)
-> *mut PyObject;
#[cfg(py_sys_config = "Py_USING_UNICODE")]
pub fn PyInt_FromUnicode(
u: *mut crate::unicodeobject::Py_UNICODE,
length: Py_ssize_t,
base: c_int,
) -> *mut PyObject;
pub fn PyInt_FromLong(ival: c_long) -> *mut PyObject;
pub fn PyInt_FromSize_t(ival: size_t) -> *mut PyObject;
pub fn PyInt_FromSsize_t(ival: Py_ssize_t) -> *mut PyObject;
pub fn PyInt_AsLong(io: *mut PyObject) -> c_long;
pub fn PyInt_AsSsize_t(io: *mut PyObject) -> Py_ssize_t;
fn _PyInt_AsInt(io: *mut PyObject) -> c_int;
pub fn PyInt_AsUnsignedLongMask(io: *mut PyObject) -> c_ulong;
pub fn PyInt_AsUnsignedLongLongMask(io: *mut PyObject) -> c_ulonglong;
pub fn PyInt_GetMax() -> c_long;
ignore! {
fn PyOS_strtoul(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_ulong;
fn PyOS_strtol(arg1: *mut c_char, arg2: *mut *mut c_char, arg3: c_int) -> c_long;
}
pub fn PyInt_ClearFreeList() -> c_int;
ignore! {
fn _PyInt_Format(v: *mut PyIntObject, base: c_int, newstyle: c_int) -> *mut PyObject;
fn _PyInt_FormatAdvanced(
|
}
pub unsafe fn PyInt_AS_LONG(io: *mut PyObject) -> c_long {
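// Mirrors CPython 2's PyInt_AS_LONG macro: reads ob_ival directly with no type check.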
(*(io as *mut PyIntObject)).ob_ival
}
|
obj: *mut PyObject,
format_spec: *mut c_char,
format_spec_len: Py_ssize_t,
) -> *mut PyObject;
}
|
random_line_split
|
method-on-generic-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STACK BY REF
// gdb-command:print *self
// gdb-check:$1 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:print self
// gdb-check:$4 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:print *self
// gdb-check:$7 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:print self
// gdb-check:$10 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:print *self
// gdb-check:$13 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
#[derive(Copy)]
struct Struct<T> {
x: T
}
impl<T> Struct<T> {
fn self_by_ref(&self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn self_by_val(self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn
|
(self: Box<Struct<T>>, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
}
fn main() {
let stack = Struct { x: (8888_u32, -8888_i32) };
let _ = stack.self_by_ref(-1, -2);
let _ = stack.self_by_val(-3, -4);
let owned = box Struct { x: 1234.5f64 };
let _ = owned.self_by_ref(-5, -6);
let _ = owned.self_by_val(-7, -8);
let _ = owned.self_owned(-9, -10);
}
fn zzz() {()}
|
self_owned
|
identifier_name
|
method-on-generic-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STACK BY REF
// gdb-command:print *self
// gdb-check:$1 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:print self
// gdb-check:$4 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:print *self
// gdb-check:$7 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:print self
// gdb-check:$10 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:print *self
// gdb-check:$13 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
#[derive(Copy)]
struct Struct<T> {
x: T
}
impl<T> Struct<T> {
fn self_by_ref(&self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn self_by_val(self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn self_owned(self: Box<Struct<T>>, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
}
fn main() {
let stack = Struct { x: (8888_u32, -8888_i32) };
let _ = stack.self_by_ref(-1, -2);
let _ = stack.self_by_val(-3, -4);
let owned = box Struct { x: 1234.5f64 };
let _ = owned.self_by_ref(-5, -6);
let _ = owned.self_by_val(-7, -8);
let _ = owned.self_owned(-9, -10);
}
fn zzz()
|
{()}
|
identifier_body
|
|
method-on-generic-struct.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
// min-lldb-version: 310
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// STACK BY REF
// gdb-command:print *self
// gdb-check:$1 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:print self
// gdb-check:$4 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:print *self
// gdb-check:$7 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:print self
// gdb-check:$10 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:print *self
// gdb-check:$13 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
#[derive(Copy)]
struct Struct<T> {
x: T
|
impl<T> Struct<T> {
fn self_by_ref(&self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn self_by_val(self, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
fn self_owned(self: Box<Struct<T>>, arg1: int, arg2: int) -> int {
zzz(); // #break
arg1 + arg2
}
}
fn main() {
let stack = Struct { x: (8888_u32, -8888_i32) };
let _ = stack.self_by_ref(-1, -2);
let _ = stack.self_by_val(-3, -4);
let owned = box Struct { x: 1234.5f64 };
let _ = owned.self_by_ref(-5, -6);
let _ = owned.self_by_val(-7, -8);
let _ = owned.self_owned(-9, -10);
}
fn zzz() {()}
|
}
|
random_line_split
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common::config;
use common;
use util;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
iter_header(testfile, |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
true
});
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
fn xfail_target(config: &config) -> ~str {
~"xfail-" + util::get_os(config.target)
}
let val = iter_header(testfile, |ln| {
if parse_name_directive(ln, "xfail-test") { false }
else if parse_name_directive(ln, xfail_target(config)) { false }
else if config.mode == common::mode_pretty &&
parse_name_directive(ln, "xfail-pretty") { false }
else { true }
});
!val
}
fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
use std::io::buffered::BufferedReader;
use std::io::File;
let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if ln.starts_with("fn") || ln.starts_with("mod") {
return true;
} else { if !(it(ln.trim())) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str>
|
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
parse_name_value_directive(line, ~"exec-env").map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: ~[~str] = nv.splitn('=', 1).map(|s| s.to_owned()).collect();
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not {}", n)
}
})
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path::new(s)),
None => {
if parse_name_directive(line, "pp-exact") {
testfile.filename().map(|s| Path::new(s))
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
line.contains(directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ":";
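// Directives have the form "name: value"; return everything after "name:".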
match line.find_str(keycolon) {
Some(colon) => {
let value = line.slice(colon + keycolon.len(),
line.len()).to_owned();
debug!("{}: {}", directive, value);
Some(value)
}
None => None
}
}
|
{
parse_name_value_directive(line, ~"error-pattern")
}
|
identifier_body
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common::config;
use common;
use util;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
iter_header(testfile, |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
true
});
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
fn xfail_target(config: &config) -> ~str {
~"xfail-" + util::get_os(config.target)
}
let val = iter_header(testfile, |ln| {
if parse_name_directive(ln, "xfail-test") { false }
else if parse_name_directive(ln, xfail_target(config)) { false }
else if config.mode == common::mode_pretty &&
parse_name_directive(ln, "xfail-pretty") { false }
else { true }
});
!val
}
fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
use std::io::buffered::BufferedReader;
use std::io::File;
let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if ln.starts_with("fn") || ln.starts_with("mod") {
return true;
} else { if !(it(ln.trim())) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
parse_name_value_directive(line, ~"exec-env").map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: ~[~str] = nv.splitn('=', 1).map(|s| s.to_owned()).collect();
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not {}", n)
}
})
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path::new(s)),
None => {
if parse_name_directive(line, "pp-exact") {
testfile.filename().map(|s| Path::new(s))
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
line.contains(directive)
}
fn
|
(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ":";
match line.find_str(keycolon) {
Some(colon) => {
let value = line.slice(colon + keycolon.len(),
line.len()).to_owned();
debug!("{}: {}", directive, value);
Some(value)
}
None => None
}
}
|
parse_name_value_directive
|
identifier_name
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common::config;
use common;
use util;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
iter_header(testfile, |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None =>
|
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
true
});
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
fn xfail_target(config: &config) -> ~str {
~"xfail-" + util::get_os(config.target)
}
let val = iter_header(testfile, |ln| {
if parse_name_directive(ln, "xfail-test") { false }
else if parse_name_directive(ln, xfail_target(config)) { false }
else if config.mode == common::mode_pretty &&
parse_name_directive(ln, "xfail-pretty") { false }
else { true }
});
!val
}
fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
use std::io::buffered::BufferedReader;
use std::io::File;
let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if ln.starts_with("fn") || ln.starts_with("mod") {
return true;
} else { if !(it(ln.trim())) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
parse_name_value_directive(line, ~"exec-env").map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: ~[~str] = nv.splitn('=', 1).map(|s| s.to_owned()).collect();
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not {}", n)
}
})
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path::new(s)),
None => {
if parse_name_directive(line, "pp-exact") {
testfile.filename().map(|s| Path::new(s))
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
line.contains(directive)
}
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ":";
match line.find_str(keycolon) {
Some(colon) => {
let value = line.slice(colon + keycolon.len(),
line.len()).to_owned();
debug!("{}: {}", directive, value);
Some(value)
}
None => None
}
}
|
{}
|
conditional_block
|
header.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use common::config;
use common;
use util;
pub struct TestProps {
// Lines that should be expected, in order, on standard out
error_patterns: ~[~str],
// Extra flags to pass to the compiler
compile_flags: Option<~str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: Option<Path>,
// Modules from aux directory that should be compiled
aux_builds: ~[~str],
// Environment settings to use during execution
exec_env: ~[(~str,~str)],
// Commands to be given to the debugger, when testing debug info
debugger_cmds: ~[~str],
// Lines to check if they appear in the expected debugger output
check_lines: ~[~str],
}
// Load any test directives embedded in the file
pub fn load_props(testfile: &Path) -> TestProps {
let mut error_patterns = ~[];
let mut aux_builds = ~[];
let mut exec_env = ~[];
let mut compile_flags = None;
let mut pp_exact = None;
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
iter_header(testfile, |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
};
if compile_flags.is_none() {
compile_flags = parse_compile_flags(ln);
}
if pp_exact.is_none() {
pp_exact = parse_pp_exact(ln, testfile);
}
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
}
match parse_exec_env(ln) {
Some(ee) => { exec_env.push(ee); }
None => {}
}
match parse_debugger_cmd(ln) {
Some(dc) => debugger_cmds.push(dc),
None => ()
};
match parse_check_line(ln) {
Some(cl) => check_lines.push(cl),
None => ()
};
true
});
return TestProps {
error_patterns: error_patterns,
compile_flags: compile_flags,
pp_exact: pp_exact,
aux_builds: aux_builds,
exec_env: exec_env,
debugger_cmds: debugger_cmds,
check_lines: check_lines
};
}
pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
fn xfail_target(config: &config) -> ~str {
~"xfail-" + util::get_os(config.target)
}
let val = iter_header(testfile, |ln| {
if parse_name_directive(ln, "xfail-test") { false }
else if parse_name_directive(ln, xfail_target(config)) { false }
else if config.mode == common::mode_pretty &&
parse_name_directive(ln, "xfail-pretty") { false }
else { true }
});
!val
}
fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
use std::io::buffered::BufferedReader;
use std::io::File;
let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
if ln.starts_with("fn") || ln.starts_with("mod") {
return true;
} else { if !(it(ln.trim())) { return false; } }
}
return true;
}
fn parse_error_pattern(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"error-pattern")
}
fn parse_aux_build(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"aux-build")
}
fn parse_compile_flags(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"compile-flags")
}
fn parse_debugger_cmd(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"debugger")
}
fn parse_check_line(line: &str) -> Option<~str> {
parse_name_value_directive(line, ~"check")
}
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
parse_name_value_directive(line, ~"exec-env").map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: ~[~str] = nv.splitn('=', 1).map(|s| s.to_owned()).collect();
match strs.len() {
1u => (strs.pop(), ~""),
2u => {
let end = strs.pop();
(strs.pop(), end)
}
n => fail!("Expected 1 or 2 strings, not {}", n)
}
})
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, ~"pp-exact") {
Some(s) => Some(Path::new(s)),
None => {
if parse_name_directive(line, "pp-exact") {
testfile.filename().map(|s| Path::new(s))
} else {
None
}
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
line.contains(directive)
}
|
Some(colon) => {
let value = line.slice(colon + keycolon.len(),
line.len()).to_owned();
debug!("{}: {}", directive, value);
Some(value)
}
None => None
}
}
|
fn parse_name_value_directive(line: &str,
directive: ~str) -> Option<~str> {
let keycolon = directive + ":";
match line.find_str(keycolon) {
|
random_line_split
|
room.rs
|
use diesel;
use diesel::pg::PgConnection;
use regex::Regex;
use rocket::http::Status;
use rocket::response::Failure;
use super::schema::rooms;
|
#[derive(Clone, Serialize, Deserialize, Queryable, Identifiable)]
pub struct Room {
pub id: i64,
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(Insertable, Deserialize)]
#[table_name = "rooms"]
pub struct NewRoom {
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(FromForm)]
pub struct SearchRoom {
pub name: String,
}
impl Room {
#[inline]
pub fn create(conn: &PgConnection, mut new_room: NewRoom) -> Result<Room, Failure> {
use diesel::prelude::*;
use diesel::result::Error;
new_room.name = new_room.name.trim().to_string();
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
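// Room names must be 3-20 "word" characters (ASCII letters, digits, or '_').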
if !regex.is_match(&new_room.name) {
return Err(Failure(Status::BadRequest));
}
// I add the type here because otherwise the clone() doesn't know which type it is.
let created_room: Result<Room, Error> = diesel::insert_into(rooms::table)
.values(&new_room)
.get_result(conn);
match created_room {
Ok(room) => {
play_video_thread(room.clone());
Ok(room)
}
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn update(conn: &PgConnection, room: &Room) -> Result<Room, Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
if !regex.is_match(&room.name) {
return Err(Failure(Status::BadRequest));
}
let result = diesel::update(rooms)
.set((
description.eq(room.description.clone()),
name.eq(room.name.clone()),
is_public.eq(room.is_public),
))
.get_result(conn);
match result {
Ok(updated_room) => Ok(updated_room),
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn delete(conn: &PgConnection, room_id: i64) -> Result<(), Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
use std::fs;
let result = diesel::delete(rooms.filter(id.eq(room_id))).execute(conn);
if result.is_err() {
return Err(Failure(Status::InternalServerError));
}
let picture_url = format!("{}/{}", *super::PICTURES_DIR, room_id).to_string();
let _res = fs::remove_file(picture_url);
Ok(())
}
// Find & return a room by id
#[inline]
pub fn find(conn: &PgConnection, room_id: i64) -> Option<Room> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let result = rooms.filter(id.eq(room_id)).first::<Room>(conn);
match result {
Ok(result) => Some(result),
Err(_e) => None,
}
}
// Return all rooms
#[inline]
pub fn all(conn: &PgConnection, query: Option<String>) -> Result<Vec<Room>, Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let result;
match query {
Some(query) => {
result = rooms
.filter(name.ilike(format!("%{}%", query.to_lowercase())))
.order(name.desc())
.load::<Room>(conn);
}
None => {
result = rooms.order(id.asc()).load::<Room>(conn);
}
}
match result {
Ok(result) => Ok(result),
Err(e) => {
println!("Error while fetching the rooms: {}", e);
Err(Failure(Status::InternalServerError))
}
}
}
}
|
use player::play_video_thread;
|
random_line_split
|
room.rs
|
use diesel;
use diesel::pg::PgConnection;
use regex::Regex;
use rocket::http::Status;
use rocket::response::Failure;
use super::schema::rooms;
use player::play_video_thread;
#[derive(Clone, Serialize, Deserialize, Queryable, Identifiable)]
pub struct Room {
pub id: i64,
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(Insertable, Deserialize)]
#[table_name = "rooms"]
pub struct NewRoom {
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(FromForm)]
pub struct SearchRoom {
pub name: String,
}
impl Room {
#[inline]
pub fn create(conn: &PgConnection, mut new_room: NewRoom) -> Result<Room, Failure> {
use diesel::prelude::*;
use diesel::result::Error;
new_room.name = new_room.name.trim().to_string();
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
if !regex.is_match(&new_room.name) {
return Err(Failure(Status::BadRequest));
}
// I add the type here because otherwise the clone() doesn't know which type it is.
let created_room: Result<Room, Error> = diesel::insert_into(rooms::table)
.values(&new_room)
.get_result(conn);
match created_room {
Ok(room) => {
play_video_thread(room.clone());
Ok(room)
}
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn update(conn: &PgConnection, room: &Room) -> Result<Room, Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
if !regex.is_match(&room.name) {
return Err(Failure(Status::BadRequest));
}
let result = diesel::update(rooms)
.set((
description.eq(room.description.clone()),
name.eq(room.name.clone()),
is_public.eq(room.is_public),
))
.get_result(conn);
match result {
Ok(updated_room) => Ok(updated_room),
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn delete(conn: &PgConnection, room_id: i64) -> Result<(), Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
use std::fs;
let result = diesel::delete(rooms.filter(id.eq(room_id))).execute(conn);
if result.is_err() {
return Err(Failure(Status::InternalServerError));
}
let picture_url = format!("{}/{}", *super::PICTURES_DIR, room_id).to_string();
let _res = fs::remove_file(picture_url);
Ok(())
}
// Find & return a room by id
#[inline]
pub fn find(conn: &PgConnection, room_id: i64) -> Option<Room> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let result = rooms.filter(id.eq(room_id)).first::<Room>(conn);
match result {
Ok(result) => Some(result),
Err(_e) => None,
}
}
// Return all rooms
#[inline]
pub fn all(conn: &PgConnection, query: Option<String>) -> Result<Vec<Room>, Failure>
|
Err(e) => {
println!("Error while fetching the rooms: {}", e);
Err(Failure(Status::InternalServerError))
}
}
}
}
|
{
use diesel::prelude::*;
use schema::rooms::dsl::*;
let result;
match query {
Some(query) => {
result = rooms
.filter(name.ilike(format!("%{}%", query.to_lowercase())))
.order(name.desc())
.load::<Room>(conn);
}
None => {
result = rooms.order(id.asc()).load::<Room>(conn);
}
}
match result {
Ok(result) => Ok(result),
|
identifier_body
|
room.rs
|
use diesel;
use diesel::pg::PgConnection;
use regex::Regex;
use rocket::http::Status;
use rocket::response::Failure;
use super::schema::rooms;
use player::play_video_thread;
#[derive(Clone, Serialize, Deserialize, Queryable, Identifiable)]
pub struct Room {
pub id: i64,
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(Insertable, Deserialize)]
#[table_name = "rooms"]
pub struct
|
{
pub name: String,
pub description: Option<String>,
pub is_public: bool,
}
#[derive(FromForm)]
pub struct SearchRoom {
pub name: String,
}
impl Room {
#[inline]
pub fn create(conn: &PgConnection, mut new_room: NewRoom) -> Result<Room, Failure> {
use diesel::prelude::*;
use diesel::result::Error;
new_room.name = new_room.name.trim().to_string();
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
if !regex.is_match(&new_room.name) {
return Err(Failure(Status::BadRequest));
}
// I add the type here because otherwise the clone() doesn't know which type it is.
let created_room: Result<Room, Error> = diesel::insert_into(rooms::table)
.values(&new_room)
.get_result(conn);
match created_room {
Ok(room) => {
play_video_thread(room.clone());
Ok(room)
}
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn update(conn: &PgConnection, room: &Room) -> Result<Room, Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let regex = Regex::new(r"^[[:word:]]{3,20}$").unwrap();
if !regex.is_match(&room.name) {
return Err(Failure(Status::BadRequest));
}
let result = diesel::update(rooms)
.set((
description.eq(room.description.clone()),
name.eq(room.name.clone()),
is_public.eq(room.is_public),
))
.get_result(conn);
match result {
Ok(updated_room) => Ok(updated_room),
Err(_) => Err(Failure(Status::Conflict)),
}
}
#[inline]
pub fn delete(conn: &PgConnection, room_id: i64) -> Result<(), Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
use std::fs;
let result = diesel::delete(rooms.filter(id.eq(room_id))).execute(conn);
if result.is_err() {
return Err(Failure(Status::InternalServerError));
}
        // Best-effort cleanup: remove the room's picture, ignoring the error
        // if no picture exists on disk.
        let picture_url = format!("{}/{}", *super::PICTURES_DIR, room_id);
        let _res = fs::remove_file(picture_url);
Ok(())
}
// Find & return a room by id
#[inline]
pub fn find(conn: &PgConnection, room_id: i64) -> Option<Room> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
let result = rooms.filter(id.eq(room_id)).first::<Room>(conn);
match result {
Ok(result) => Some(result),
Err(_e) => None,
}
}
// Return all rooms
#[inline]
pub fn all(conn: &PgConnection, query: Option<String>) -> Result<Vec<Room>, Failure> {
use diesel::prelude::*;
use schema::rooms::dsl::*;
        let result = match query {
            Some(query) => rooms
                .filter(name.ilike(format!("%{}%", query.to_lowercase())))
                .order(name.desc())
                .load::<Room>(conn),
            None => rooms.order(id.asc()).load::<Room>(conn),
        };
match result {
Ok(result) => Ok(result),
Err(e) => {
println!("Error while fetching the rooms: {}", e);
Err(Failure(Status::InternalServerError))
}
}
}
}
|
NewRoom
|
identifier_name
|
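The two room.rs records above lean on Diesel's query builder throughout. One pattern worth showing: when the struct being saved derives `AsChangeset` in addition to the `Identifiable` shown in the record, `diesel::update` can target the row by primary key with no manual filter. A minimal sketch under that assumption (the `AsChangeset` derive on `Room` is hypothetical here, not in the original):
use diesel::prelude::*;
// Hypothetical variant: assumes Room also derives AsChangeset.
// diesel::update(room) produces WHERE id = room.id from Identifiable,
// and .set(room) produces the SET clause from AsChangeset.
fn update_room(conn: &PgConnection, room: &Room) -> QueryResult<Room> {
    diesel::update(room).set(room).get_result(conn)
}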
object-lifetime-default-from-box-error.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test various cases where the defaults should lead to errors being
// reported.
#![allow(dead_code)]
trait SomeTrait {
fn
|
(&self) { }
}
struct SomeStruct<'a> {
r: Box<SomeTrait+'a>
}
fn load(ss: &mut SomeStruct) -> Box<SomeTrait> {
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
ss.r //~ ERROR mismatched types
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
    // No error: b is bounded by 'static which outlives the
// (anonymous) lifetime on the struct.
ss.r = b;
}
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
ss.r = b; //~ ERROR mismatched types
}
fn main() {
}
|
dummy
|
identifier_name
|
object-lifetime-default-from-box-error.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test various cases where the defaults should lead to errors being
// reported.
#![allow(dead_code)]
trait SomeTrait {
fn dummy(&self)
|
}
struct SomeStruct<'a> {
r: Box<SomeTrait+'a>
}
fn load(ss: &mut SomeStruct) -> Box<SomeTrait> {
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
ss.r //~ ERROR mismatched types
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
    // No error: b is bounded by 'static which outlives the
// (anonymous) lifetime on the struct.
ss.r = b;
}
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
ss.r = b; //~ ERROR mismatched types
}
fn main() {
}
|
{ }
|
identifier_body
|
object-lifetime-default-from-box-error.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test various cases where the defaults should lead to errors being
// reported.
#![allow(dead_code)]
trait SomeTrait {
fn dummy(&self) { }
}
struct SomeStruct<'a> {
r: Box<SomeTrait+'a>
}
fn load(ss: &mut SomeStruct) -> Box<SomeTrait> {
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
ss.r //~ ERROR mismatched types
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
    // No error: b is bounded by 'static which outlives the
// (anonymous) lifetime on the struct.
ss.r = b;
|
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
ss.r = b; //~ ERROR mismatched types
}
fn main() {
}
|
}
|
random_line_split
|
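All three object-lifetime records make the same point: a bare `Box<SomeTrait>` in a signature defaults to `Box<SomeTrait + 'static>`, so a box holding a shorter-lived trait object can be neither returned nor stored through it. The fix the test gestures at is to name the lifetime on both sides; a minimal sketch in the same pre-2018 syntax, with `store2` as a hypothetical companion to the test's `store1`:
// With both lifetimes equated, the assignment that store1 rejects is fine.
fn store2<'a>(ss: &mut SomeStruct<'a>, b: Box<SomeTrait + 'a>) {
    ss.r = b; // ok: both sides are Box<SomeTrait + 'a>
}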
once-cant-call-twice-on-stack.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing guarantees provided by once functions.
// This program would segfault if it were legal.
// compile-flags:-Z once-fns
extern mod extra;
use extra::arc;
use std::util;
fn foo(blk: &once fn()) {
blk();
blk(); //~ ERROR use of moved value
}
fn
|
() {
let x = arc::ARC(true);
do foo {
assert!(*x.get());
util::ignore(x);
}
}
|
main
|
identifier_name
|
once-cant-call-twice-on-stack.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
// except according to those terms.
// Testing guarantees provided by once functions.
// This program would segfault if it were legal.
// compile-flags:-Z once-fns
extern mod extra;
use extra::arc;
use std::util;
fn foo(blk: &once fn()) {
blk();
blk(); //~ ERROR use of moved value
}
fn main() {
let x = arc::ARC(true);
do foo {
assert!(*x.get());
util::ignore(x);
}
}
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
once-cant-call-twice-on-stack.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing guarantees provided by once functions.
// This program would segfault if it were legal.
// compile-flags:-Z once-fns
extern mod extra;
use extra::arc;
use std::util;
fn foo(blk: &once fn())
|
fn main() {
let x = arc::ARC(true);
do foo {
assert!(*x.get());
util::ignore(x);
}
}
|
{
blk();
blk(); //~ ERROR use of moved value
}
|
identifier_body
|
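once-cant-call-twice-on-stack.rs predates Rust 1.0: `extern mod`, `&once fn()`, and `do` blocks are all long gone. The guarantee it exercises survives in today's `FnOnce`, where the first call consumes the closure and a second call is a move error. A rough modern analogue, offered only as an illustration of the same check:
fn foo<F: FnOnce()>(blk: F) {
    blk();
    // blk(); // error[E0382]: use of moved value: `blk`
}
fn main() {
    let x = std::sync::Arc::new(true);
    foo(move || {
        assert!(*x);
        drop(x); // consume the Arc, as util::ignore once did
    });
}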
cloudformation.rs
|
#![cfg(feature = "cloudformation")]
extern crate rusoto;
use rusoto::cloudformation::{CloudFormationClient, ListStacksInput};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
#[test]
fn should_list_stacks()
|
#[test]
fn should_list_stacks_with_status_filter() {
let client = CloudFormationClient::new(default_tls_client().unwrap(), DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);
let filters = vec!["CREATE_COMPLETE".to_owned()];
let request = ListStacksInput {
stack_status_filter: Some(filters),
..Default::default()
};
let result = client.list_stacks(&request).unwrap();
println!("{:#?}", result);
}
|
{
let client = CloudFormationClient::new(default_tls_client().unwrap(), DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);
let request = ListStacksInput::default();
let result = client.list_stacks(&request).unwrap();
println!("{:#?}", result);
}
|
identifier_body
|
cloudformation.rs
|
#![cfg(feature = "cloudformation")]
extern crate rusoto;
use rusoto::cloudformation::{CloudFormationClient, ListStacksInput};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
#[test]
fn should_list_stacks() {
let client = CloudFormationClient::new(default_tls_client().unwrap(), DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);
|
#[test]
fn should_list_stacks_with_status_filter() {
let client = CloudFormationClient::new(default_tls_client().unwrap(), DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);
let filters = vec!["CREATE_COMPLETE".to_owned()];
let request = ListStacksInput {
stack_status_filter: Some(filters),
..Default::default()
};
let result = client.list_stacks(&request).unwrap();
println!("{:#?}", result);
}
|
let request = ListStacksInput::default();
let result = client.list_stacks(&request).unwrap();
println!("{:#?}", result);
}
|
random_line_split
|
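Both cloudformation.rs tests read only the first page of results. ListStacks is a paginated API, so a fuller test would follow the continuation token until it runs out. A sketch in the same style, assuming the rusoto types carry `next_token` and `stack_summaries` fields mirroring the CloudFormation API (worth verifying against the crate version in use):
let mut request = ListStacksInput::default();
loop {
    let result = client.list_stacks(&request).unwrap();
    for stack in result.stack_summaries.unwrap_or_default() {
        println!("{}", stack.stack_name);
    }
    match result.next_token {
        Some(token) => request.next_token = Some(token), // fetch next page
        None => break,
    }
}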