file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
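Each row that follows splits one Rust source file into three spans (`prefix`, `middle`, `suffix`); `fim_type` records how the `middle` span was cut out (the four classes seen below are `random_line_split`, `identifier_name`, `identifier_body`, and `conditional_block`). A minimal Rust sketch of the row layout and its reassembly; the `FimRow` type and its field names are illustrative assumptions, not part of the dataset schema:

```rust
/// Illustrative row type; an assumption for this sketch, since the dataset
/// itself only defines the five columns above.
struct FimRow {
    file_name: String,
    prefix: String,   // code before the masked span
    middle: String,   // the masked span a model is asked to fill in
    suffix: String,   // code after the masked span
    fim_type: String, // how `middle` was cut, e.g. "random_line_split"
}

impl FimRow {
    /// The ground-truth file is the three spans concatenated in order.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```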
count_fast.rs
|
use crate::word_count::WordCount;
use super::WordCountable;
#[cfg(any(target_os = "linux", target_os = "android"))]
use std::fs::OpenOptions;
use std::io::{self, ErrorKind, Read};
#[cfg(unix)]
use libc::S_IFREG;
#[cfg(unix)]
use nix::sys::stat;
#[cfg(any(target_os = "linux", target_os = "android"))]
use std::os::unix::io::AsRawFd;
#[cfg(any(target_os = "linux", target_os = "android"))]
use libc::S_IFIFO;
#[cfg(any(target_os = "linux", target_os = "android"))]
use uucore::pipes::{pipe, splice, splice_exact};
const BUF_SIZE: usize = 16 * 1024;
#[cfg(any(target_os = "linux", target_os = "android"))]
const SPLICE_SIZE: usize = 128 * 1024;
/// This is a Linux-specific function to count the number of bytes using the
/// `splice` system call, which is faster than using `read`.
///
/// On error it returns the number of bytes it did manage to read, since the
/// caller will fall back to a simpler method.
#[inline]
#[cfg(any(target_os = "linux", target_os = "android"))]
fn count_bytes_using_splice(fd: &impl AsRawFd) -> Result<usize, usize> {
let null_file = OpenOptions::new()
.write(true)
.open("/dev/null")
.map_err(|_| 0_usize)?;
let null_rdev = stat::fstat(null_file.as_raw_fd())
.map_err(|_| 0_usize)?
.st_rdev;
if (stat::major(null_rdev), stat::minor(null_rdev)) != (1, 3) {
// This is not a proper /dev/null, writing to it is probably bad
// Bit of an edge case, but it has been known to happen
return Err(0);
}
let (pipe_rd, pipe_wr) = pipe().map_err(|_| 0_usize)?;
let mut byte_count = 0;
|
Ok(res) => {
byte_count += res;
// Silence the warning, as we want to return the error message
#[allow(clippy::question_mark)]
if splice_exact(&pipe_rd, &null_file, res).is_err() {
return Err(byte_count);
}
}
Err(_) => return Err(byte_count),
};
}
Ok(byte_count)
}
/// In the special case where we only need to count the number of bytes. There
/// are several optimizations we can do:
/// 1. On Unix, we can simply `stat` the file if it is regular.
/// 2. On Linux -- if the above did not work -- we can use splice to count
/// the number of bytes if the file is a FIFO.
/// 3. Otherwise, we just read normally, but without the overhead of counting
/// other things such as lines and words.
#[inline]
pub(crate) fn count_bytes_fast<T: WordCountable>(handle: &mut T) -> (usize, Option<io::Error>) {
let mut byte_count = 0;
#[cfg(unix)]
{
let fd = handle.as_raw_fd();
if let Ok(stat) = stat::fstat(fd) {
// If the file is regular, then the `st_size` should hold
// the file's size in bytes.
// If stat.st_size = 0 then
// - either the size is 0
// - or the size is unknown.
// The second case happens for files in pseudo-filesystems. For
// example with /proc/version and /sys/kernel/profiling. So,
// if it is 0 we don't report that and instead do a full read.
if (stat.st_mode & S_IFREG) != 0 && stat.st_size > 0 {
return (stat.st_size as usize, None);
}
#[cfg(any(target_os = "linux", target_os = "android"))]
{
// Else, if we're on Linux and our file is a FIFO pipe
// (or stdin), we use splice to count the number of bytes.
if (stat.st_mode & S_IFIFO) != 0 {
match count_bytes_using_splice(handle) {
Ok(n) => return (n, None),
Err(n) => byte_count = n,
}
}
}
}
}
// Fall back on `read`, but without the overhead of counting words and lines.
let mut buf = [0_u8; BUF_SIZE];
loop {
match handle.read(&mut buf) {
Ok(0) => return (byte_count, None),
Ok(n) => {
byte_count += n;
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return (byte_count, Some(e)),
}
}
}
pub(crate) fn count_bytes_and_lines_fast<R: Read>(
handle: &mut R,
) -> (WordCount, Option<io::Error>) {
let mut total = WordCount::default();
let mut buf = [0; BUF_SIZE];
loop {
match handle.read(&mut buf) {
Ok(0) => return (total, None),
Ok(n) => {
total.bytes += n;
total.lines += bytecount::count(&buf[..n], b'\n');
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return (total, Some(e)),
}
}
}
|
loop {
match splice(fd, &pipe_wr, SPLICE_SIZE) {
Ok(0) => break,
|
random_line_split
|
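For concreteness, rejoining this first row (prefix tail, then the `middle` span, then the suffix head) restores the splice loop of `count_bytes_using_splice`; indentation is re-added here for readability:

```rust
let mut byte_count = 0;
loop {
    match splice(fd, &pipe_wr, SPLICE_SIZE) {
        Ok(0) => break,
        Ok(res) => {
            byte_count += res;
            // Silence the warning, as we want to return the error message.
            #[allow(clippy::question_mark)]
            if splice_exact(&pipe_rd, &null_file, res).is_err() {
                return Err(byte_count);
            }
        }
        Err(_) => return Err(byte_count),
    };
}
Ok(byte_count)
```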
custom_build.rs
|
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::Mutex;
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
use util::Freshness;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
pkg, target.name()));
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
(cx.layout(pkg, Kind::Host).build(pkg),
cx.layout(pkg, kind).build_out(pkg))
};
// Building the command to execute
let to_exec = script_output.join(target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
None => {}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
let not_custom = pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let work = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[&(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(name),
super::envify(key)), value);
}
}
try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
.map_err(|e| {
human(format!("failed to write output of custom build command: {}",
e))
}));
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
//
// Note that the freshness calculation here is the build_cmd freshness, not
// target specific freshness. This is because we don't actually know what
// the inputs are to this command!
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
let dirty = Work::new(move |tx| {
try!(work.call((tx.clone())));
dirty.call(tx)
});
let fresh = Work::new(move |tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
human(format!("failed to read cached build command output: {}", e))
}));
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
});
Ok((dirty, fresh, freshness))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock().unwrap();
match req {
Platform::Target => { outputs.insert((id, Kind::Target), output); }
Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
Platform::PluginAndTarget => {
outputs.insert((id.clone(), Kind::Host), output.clone());
|
}
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
return Err(human(format!("Only `-l` and `-L` flags are allowed \
in {}: `{}`",
whence, value)))
}
let value = match flags_iter.next() {
Some(v) => v,
None => return Err(human(format!("Flag in rustc-flags has no \
value in {}: `{}`",
whence, value)))
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => return Err(human("only -l and -L flags are allowed"))
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
pkg: &'b Package,
targets: &[(&'b Target, &'b Profile)]) {
let mut ret = HashMap::new();
for &(target, profile) in targets {
build(&mut ret, Kind::Target, pkg, target, profile, cx);
build(&mut ret, Kind::Host, pkg, target, profile, cx);
}
// Make the output a little more deterministic by sorting all dependencies
for (&(id, target, _, kind), slot) in ret.iter_mut() {
slot.sort();
slot.dedup();
debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
}
cx.build_scripts = ret;
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
&'b Profile, Kind),
Vec<&'b PackageId>>,
kind: Kind,
pkg: &'b Package,
target: &'b Target,
profile: &'b Profile,
cx: &Context<'b, 'cfg>)
-> &'a [&'b PackageId] {
// If this target has crossed into "host-land" we need to change the
// kind that we're compiling for, and otherwise just do a quick
// pre-flight check to see if we've already calculated the set of
// dependencies.
let kind = if target.for_host() {Kind::Host} else {kind};
let id = pkg.package_id();
if out.contains_key(&(id, target, profile, kind)) {
return &out[&(id, target, profile, kind)]
}
// This loop is both the recursive and additive portion of this
// function, the key part of the logic being around determining the
// right `kind` to recurse on. If a dependency fits in the kind that
// we've got specified, then we just keep blazing a trail, but otherwise
// we *switch* the kind we're looking at because it must fit into the
// other category.
//
// We always recurse, but only add to our own array if the target is
// linkable to us (e.g. not a binary) and it's for the same original
// `kind`.
let mut ret = Vec::new();
for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
let req = cx.get_requirement(pkg, target);
let dep_kind = if req.includes(kind) {
kind
} else if kind == Kind::Target {
Kind::Host
} else {
Kind::Target
};
let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
if target.linkable() && kind == dep_kind {
if pkg.has_custom_build() {
ret.push(pkg.package_id());
}
ret.extend(dep_scripts.iter().cloned());
}
}
let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
prev.extend(ret);
return prev
}
}
|
outputs.insert((id, Kind::Target), output);
}
|
random_line_split
|
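In this row the `random_line_split` cut falls inside `BuildState::insert`; rejoining the spans restores the `Platform::PluginAndTarget` arm:

```rust
Platform::PluginAndTarget => {
    outputs.insert((id.clone(), Kind::Host), output.clone());
    outputs.insert((id, Kind::Target), output);
}
```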
custom_build.rs
|
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::Mutex;
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
use util::Freshness;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
pkg, target.name()));
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
(cx.layout(pkg, Kind::Host).build(pkg),
cx.layout(pkg, kind).build_out(pkg))
};
// Building the command to execute
let to_exec = script_output.join(target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
None => {}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
let not_custom = pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let work = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[&(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(name),
super::envify(key)), value);
}
}
try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
.map_err(|e| {
human(format!("failed to write output of custom build command: {}",
e))
}));
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
//
// Note that the freshness calculation here is the build_cmd freshness, not
// target specific freshness. This is because we don't actually know what
// the inputs are to this command!
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
let dirty = Work::new(move |tx| {
try!(work.call((tx.clone())));
dirty.call(tx)
});
let fresh = Work::new(move |tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
human(format!("failed to read cached build command output: {}", e))
}));
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
});
Ok((dirty, fresh, freshness))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock().unwrap();
match req {
Platform::Target => { outputs.insert((id, Kind::Target), output); }
Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
Platform::PluginAndTarget => {
outputs.insert((id.clone(), Kind::Host), output.clone());
outputs.insert((id, Kind::Target), output);
}
}
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn
|
(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
return Err(human(format!("Only `-l` and `-L` flags are allowed \
in {}: `{}`",
whence, value)))
}
let value = match flags_iter.next() {
Some(v) => v,
None => return Err(human(format!("Flag in rustc-flags has no \
value in {}: `{}`",
whence, value)))
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => return Err(human("only -l and -L flags are allowed"))
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
pkg: &'b Package,
targets: &[(&'b Target, &'b Profile)]) {
let mut ret = HashMap::new();
for &(target, profile) in targets {
build(&mut ret, Kind::Target, pkg, target, profile, cx);
build(&mut ret, Kind::Host, pkg, target, profile, cx);
}
// Make the output a little more deterministic by sorting all dependencies
for (&(id, target, _, kind), slot) in ret.iter_mut() {
slot.sort();
slot.dedup();
debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
}
cx.build_scripts = ret;
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
&'b Profile, Kind),
Vec<&'b PackageId>>,
kind: Kind,
pkg: &'b Package,
target: &'b Target,
profile: &'b Profile,
cx: &Context<'b, 'cfg>)
-> &'a [&'b PackageId] {
// If this target has crossed into "host-land" we need to change the
// kind that we're compiling for, and otherwise just do a quick
// pre-flight check to see if we've already calculated the set of
// dependencies.
let kind = if target.for_host() {Kind::Host} else {kind};
let id = pkg.package_id();
if out.contains_key(&(id, target, profile, kind)) {
return &out[&(id, target, profile, kind)]
}
// This loop is both the recursive and additive portion of this
// function, the key part of the logic being around determining the
// right `kind` to recurse on. If a dependency fits in the kind that
// we've got specified, then we just keep blazing a trail, but otherwise
// we *switch* the kind we're looking at because it must fit into the
// other category.
//
// We always recurse, but only add to our own array if the target is
// linkable to us (e.g. not a binary) and it's for the same original
// `kind`.
let mut ret = Vec::new();
for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
let req = cx.get_requirement(pkg, target);
let dep_kind = if req.includes(kind) {
kind
} else if kind == Kind::Target {
Kind::Host
} else {
Kind::Target
};
let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
if target.linkable() && kind == dep_kind {
if pkg.has_custom_build() {
ret.push(pkg.package_id());
}
ret.extend(dep_scripts.iter().cloned());
}
}
let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
prev.extend(ret);
return prev
}
}
|
parse
|
identifier_name
|
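Here the masked `middle` is a single identifier, the method name `parse`; rejoined, the seam reads:

```rust
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
```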
custom_build.rs
|
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::Mutex;
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
use util::Freshness;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
pkg, target.name()));
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
(cx.layout(pkg, Kind::Host).build(pkg),
cx.layout(pkg, kind).build_out(pkg))
};
// Building the command to execute
let to_exec = script_output.join(target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
None => {}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
let not_custom = pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let work = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[&(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(name),
super::envify(key)), value);
}
}
try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
.map_err(|e| {
human(format!("failed to write output of custom build command: {}",
e))
}));
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
//
// Note that the freshness calculation here is the build_cmd freshness, not
// target specific freshness. This is because we don't actually know what
// the inputs are to this command!
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
let dirty = Work::new(move |tx| {
try!(work.call((tx.clone())));
dirty.call(tx)
});
let fresh = Work::new(move |tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
human(format!("failed to read cached build command output: {}", e))
}));
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
});
Ok((dirty, fresh, freshness))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock().unwrap();
match req {
Platform::Target => { outputs.insert((id, Kind::Target), output); }
Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
Platform::PluginAndTarget => {
outputs.insert((id.clone(), Kind::Host), output.clone());
outputs.insert((id, Kind::Target), output);
}
}
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)>
|
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => return Err(human("only -l and -L flags are allowed"))
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
pkg: &'b Package,
targets: &[(&'b Target, &'b Profile)]) {
let mut ret = HashMap::new();
for &(target, profile) in targets {
build(&mut ret, Kind::Target, pkg, target, profile, cx);
build(&mut ret, Kind::Host, pkg, target, profile, cx);
}
// Make the output a little more deterministic by sorting all dependencies
for (&(id, target, _, kind), slot) in ret.iter_mut() {
slot.sort();
slot.dedup();
debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
}
cx.build_scripts = ret;
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
&'b Profile, Kind),
Vec<&'b PackageId>>,
kind: Kind,
pkg: &'b Package,
target: &'b Target,
profile: &'b Profile,
cx: &Context<'b, 'cfg>)
-> &'a [&'b PackageId] {
// If this target has crossed into "host-land" we need to change the
// kind that we're compiling for, and otherwise just do a quick
// pre-flight check to see if we've already calculated the set of
// dependencies.
let kind = if target.for_host() {Kind::Host} else {kind};
let id = pkg.package_id();
if out.contains_key(&(id, target, profile, kind)) {
return &out[&(id, target, profile, kind)]
}
// This loop is both the recursive and additive portion of this
// function, the key part of the logic being around determining the
// right `kind` to recurse on. If a dependency fits in the kind that
// we've got specified, then we just keep blazing a trail, but otherwise
// we *switch* the kind we're looking at because it must fit into the
// other category.
//
// We always recurse, but only add to our own array if the target is
// linkable to us (e.g. not a binary) and it's for the same original
// `kind`.
let mut ret = Vec::new();
for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
let req = cx.get_requirement(pkg, target);
let dep_kind = if req.includes(kind) {
kind
} else if kind == Kind::Target {
Kind::Host
} else {
Kind::Target
};
let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
if target.linkable() && kind == dep_kind {
if pkg.has_custom_build() {
ret.push(pkg.package_id());
}
ret.extend(dep_scripts.iter().cloned());
}
}
let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
prev.extend(ret);
return prev
}
}
|
{
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
return Err(human(format!("Only `-l` and `-L` flags are allowed \
in {}: `{}`",
whence, value)))
}
let value = match flags_iter.next() {
Some(v) => v,
None => return Err(human(format!("Flag in rustc-flags has no \
value in {}: `{}`",
whence, value)))
|
identifier_body
|
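Here `identifier_body` masks the opening chunk of the body of `parse_rustc_flags`; rejoined with the signature from the prefix, the seam reads (an excerpt; the suffix resumes mid-body):

```rust
pub fn parse_rustc_flags(value: &str, whence: &str)
                         -> CargoResult<(Vec<PathBuf>, Vec<String>)>
{
    let value = value.trim();
    let mut flags_iter = value.split(|c: char| c.is_whitespace())
                              .filter(|w| w.chars().any(|c| !c.is_whitespace()));
    // ... the suffix continues the function body from here
```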
custom_build.rs
|
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
use std::sync::Mutex;
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
use util::Freshness;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Various `--cfg` flags to pass to the compiler
pub cfgs: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare(pkg: &Package, target: &Target, req: Platform,
cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
pkg, target.name()));
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
let (script_output, build_output) = {
(cx.layout(pkg, Kind::Host).build(pkg),
cx.layout(pkg, kind).build_out(pkg))
};
// Building the command to execute
let to_exec = script_output.join(target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level.to_string())
.env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_info().host);
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
}
None => {}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
let not_custom = pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
let exec_engine = cx.exec_engine.clone();
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let work = Work::new(move |desc_tx| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
try!(fs::create_dir(&build_output).chain_error(|| {
internal("failed to create script output directory for \
build command")
}));
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[&(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
p.env(&format!("DEP_{}_{}", super::envify(name),
super::envify(key)), value);
}
}
try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
}
// And now finally, run the build command itself!
desc_tx.send(p.to_string()).ok();
let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name, e.desc);
Human(e)
}));
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
.map_err(|e| {
human(format!("failed to write output of custom build command: {}",
e))
}));
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
//
// Note that the freshness calculation here is the build_cmd freshness, not
// target specific freshness. This is because we don't actually know what
// the inputs are to this command!
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
let dirty = Work::new(move |tx| {
try!(work.call((tx.clone())));
dirty.call(tx)
});
let fresh = Work::new(move |tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
human(format!("failed to read cached build command output: {}", e))
}));
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
});
Ok((dirty, fresh, freshness))
}
impl BuildState {
pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
package.package_id().clone());
}
None => {}
}
}
let mut outputs = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
// If no package is using the library named `name`, then this is
// just an override that we ignore.
if let Some(id) = sources.get(name) {
outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
}
fn insert(&self, id: PackageId, req: Platform,
output: BuildOutput) {
let mut outputs = self.outputs.lock().unwrap();
match req {
Platform::Target => { outputs.insert((id, Kind::Target), output); }
Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
// If this build output was for both the host and target platforms,
// we need to insert it at both places.
Platform::PluginAndTarget => {
outputs.insert((id.clone(), Kind::Host), output.clone());
outputs.insert((id, Kind::Target), output);
}
}
}
}
impl BuildOutput {
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &str, pkg_name: &str) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
let mut metadata = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.lines() {
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
};
match key {
"rustc-flags" => {
let (libs, links) = try!(
BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cfg" => cfgs.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
cfgs: cfgs,
metadata: metadata,
})
}
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break
};
if flag != "-l" && flag != "-L" {
return Err(human(format!("Only `-l` and `-L` flags are allowed \
in {}: `{}`",
whence, value)))
}
let value = match flags_iter.next() {
Some(v) => v,
None => return Err(human(format!("Flag in rustc-flags has no \
value in {}: `{}`",
whence, value)))
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => return Err(human("only -l and -L flags are allowed"))
};
}
Ok((library_paths, library_links))
}
}
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The set of targets given to this function is the initial set of
/// targets/profiles being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
pkg: &'b Package,
targets: &[(&'b Target, &'b Profile)]) {
let mut ret = HashMap::new();
for &(target, profile) in targets {
build(&mut ret, Kind::Target, pkg, target, profile, cx);
build(&mut ret, Kind::Host, pkg, target, profile, cx);
}
// Make the output a little more deterministic by sorting all dependencies
for (&(id, target, _, kind), slot) in ret.iter_mut() {
slot.sort();
slot.dedup();
debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
}
cx.build_scripts = ret;
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
&'b Profile, Kind),
Vec<&'b PackageId>>,
kind: Kind,
pkg: &'b Package,
target: &'b Target,
profile: &'b Profile,
cx: &Context<'b, 'cfg>)
-> &'a [&'b PackageId] {
// If this target has crossed into "host-land" we need to change the
// kind that we're compiling for, and otherwise just do a quick
// pre-flight check to see if we've already calculated the set of
// dependencies.
let kind = if target.for_host() {Kind::Host} else {kind};
let id = pkg.package_id();
if out.contains_key(&(id, target, profile, kind))
|
// This loop is both the recursive and additive portion of this
// function, the key part of the logic being around determining the
// right `kind` to recurse on. If a dependency fits in the kind that
// we've got specified, then we just keep blazing a trail, but otherwise
// we *switch* the kind we're looking at because it must fit into the
// other category.
//
// We always recurse, but only add to our own array if the target is
// linkable to us (e.g. not a binary) and it's for the same original
// `kind`.
let mut ret = Vec::new();
for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
let req = cx.get_requirement(pkg, target);
let dep_kind = if req.includes(kind) {
kind
} else if kind == Kind::Target {
Kind::Host
} else {
Kind::Target
};
let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
if target.linkable() && kind == dep_kind {
if pkg.has_custom_build() {
ret.push(pkg.package_id());
}
ret.extend(dep_scripts.iter().cloned());
}
}
let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
prev.extend(ret);
return prev
}
}
|
{
return &out[&(id, target, profile, kind)]
}
|
conditional_block
|
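The `BuildOutput::parse` routine in the record above splits each build-script line on the first `:` and then on the first `=`. A minimal, self-contained sketch of that two-stage split (hypothetical `parse_cargo_line` helper, not Cargo's actual API):

// Minimal sketch of the two-stage split in `BuildOutput::parse`: strip the
// `cargo:` prefix, then split `key=value` on the first `=` only, so values
// may themselves contain `=`.
fn parse_cargo_line(line: &str) -> Option<(&str, &str)> {
    let data = line.strip_prefix("cargo:")?;
    let mut kv = data.splitn(2, '=');
    match (kv.next(), kv.next()) {
        // `trim_end` is the current name for the deprecated `trim_right`
        // used in the original code; the behavior is the same.
        (Some(k), Some(v)) => Some((k, v.trim_end())),
        _ => None,
    }
}

fn main() {
    assert_eq!(
        parse_cargo_line("cargo:rustc-link-lib=foo"),
        Some(("rustc-link-lib", "foo"))
    );
    assert_eq!(parse_cargo_line("warning: not for cargo"), None);
}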
minmax.rs
|
/// `MinMaxResult` is an enum returned by `minmax`.
///
/// See [`.minmax()`](crate::Itertools::minmax) for more detail.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MinMaxResult<T> {
/// Empty iterator
NoElements,
/// Iterator with one element, so the minimum and maximum are the same
OneElement(T),
/// More than one element in the iterator; the first element is not larger
/// than the second
MinMax(T, T)
}
impl<T: Clone> MinMaxResult<T> {
/// `into_option` creates an `Option` of type `(T, T)`. The returned `Option`
/// has variant `None` if and only if the `MinMaxResult` has variant
/// `NoElements`. Otherwise `Some((x, y))` is returned where `x <= y`.
/// If the `MinMaxResult` has variant `OneElement(x)`, performing this
/// operation will make one clone of `x`.
///
/// # Examples
///
/// ```
/// use itertools::MinMaxResult::{self, NoElements, OneElement, MinMax};
///
/// let r: MinMaxResult<i32> = NoElements;
/// assert_eq!(r.into_option(), None);
///
/// let r = OneElement(1);
/// assert_eq!(r.into_option(), Some((1, 1)));
///
/// let r = MinMax(1, 2);
/// assert_eq!(r.into_option(), Some((1, 2)));
/// ```
pub fn into_option(self) -> Option<(T, T)> {
match self {
MinMaxResult::NoElements => None,
MinMaxResult::OneElement(x) => Some((x.clone(), x)),
MinMaxResult::MinMax(x, y) => Some((x, y))
}
}
}
/// Implementation guts for `minmax` and `minmax_by_key`.
pub fn minmax_impl<I, K, F, L>(mut it: I, mut key_for: F,
mut lt: L) -> MinMaxResult<I::Item>
where I: Iterator,
F: FnMut(&I::Item) -> K,
L: FnMut(&I::Item, &I::Item, &K, &K) -> bool,
{
let (mut min, mut max, mut min_key, mut max_key) = match it.next() {
None => return MinMaxResult::NoElements,
Some(x) => {
|
None => return MinMaxResult::OneElement(x),
Some(y) => {
let xk = key_for(&x);
let yk = key_for(&y);
if !lt(&y, &x, &yk, &xk) {(x, y, xk, yk)} else {(y, x, yk, xk)}
}
}
}
};
loop {
// `first` and `second` are the two next elements we want to look
// at. We first compare `first` and `second` (#1). The smaller one
// is then compared to current minimum (#2). The larger one is
// compared to current maximum (#3). This way we do 3 comparisons
// for 2 elements.
let first = match it.next() {
None => break,
Some(x) => x
};
let second = match it.next() {
None => {
let first_key = key_for(&first);
if lt(&first, &min, &first_key, &min_key) {
min = first;
} else if !lt(&first, &max, &first_key, &max_key) {
max = first;
}
break;
}
Some(x) => x
};
let first_key = key_for(&first);
let second_key = key_for(&second);
if !lt(&second, &first, &second_key, &first_key) {
if lt(&first, &min, &first_key, &min_key) {
min = first;
min_key = first_key;
}
if !lt(&second, &max, &second_key, &max_key) {
max = second;
max_key = second_key;
}
} else {
if lt(&second, &min, &second_key, &min_key) {
min = second;
min_key = second_key;
}
if !lt(&first, &max, &first_key, &max_key) {
max = first;
max_key = first_key;
}
}
}
MinMaxResult::MinMax(min, max)
}
|
match it.next() {
|
random_line_split
|
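The loop in `minmax_impl` above is the classic pairwise scan described in its comment: compare the two incoming elements against each other first, then only the smaller against the running minimum and the larger against the running maximum, so two elements cost three comparisons instead of four. A standalone sketch of the same idea on plain integers (simplified: no key function or custom ordering):

// Pairwise min/max scan: 3 comparisons per 2 elements.
fn minmax(slice: &[i32]) -> Option<(i32, i32)> {
    let mut iter = slice.iter().copied();
    let first = iter.next()?;
    let (mut min, mut max) = (first, first);
    while let Some(a) = iter.next() {
        let b = match iter.next() {
            Some(b) => b,
            None => {
                // Odd element out: check it against both bounds.
                if a < min { min = a } else if a > max { max = a }
                break;
            }
        };
        // Comparison #1 orders the pair; #2 and #3 update the bounds.
        let (lo, hi) = if a <= b { (a, b) } else { (b, a) };
        if lo < min { min = lo; }
        if hi > max { max = hi; }
    }
    Some((min, max))
}

fn main() {
    assert_eq!(minmax(&[3, 1, 4, 1, 5]), Some((1, 5)));
    assert_eq!(minmax(&[7]), Some((7, 7)));
    assert_eq!(minmax(&[]), None);
}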
minmax.rs
|
/// `MinMaxResult` is an enum returned by `minmax`.
///
/// See [`.minmax()`](crate::Itertools::minmax) for more detail.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MinMaxResult<T> {
/// Empty iterator
NoElements,
/// Iterator with one element, so the minimum and maximum are the same
OneElement(T),
/// More than one element in the iterator; the first element is not larger
/// than the second
MinMax(T, T)
}
impl<T: Clone> MinMaxResult<T> {
/// `into_option` creates an `Option` of type `(T, T)`. The returned `Option`
/// has variant `None` if and only if the `MinMaxResult` has variant
/// `NoElements`. Otherwise `Some((x, y))` is returned where `x <= y`.
/// If the `MinMaxResult` has variant `OneElement(x)`, performing this
/// operation will make one clone of `x`.
///
/// # Examples
///
/// ```
/// use itertools::MinMaxResult::{self, NoElements, OneElement, MinMax};
///
/// let r: MinMaxResult<i32> = NoElements;
/// assert_eq!(r.into_option(), None);
///
/// let r = OneElement(1);
/// assert_eq!(r.into_option(), Some((1, 1)));
///
/// let r = MinMax(1, 2);
/// assert_eq!(r.into_option(), Some((1, 2)));
/// ```
pub fn into_option(self) -> Option<(T, T)>
|
}
/// Implementation guts for `minmax` and `minmax_by_key`.
pub fn minmax_impl<I, K, F, L>(mut it: I, mut key_for: F,
mut lt: L) -> MinMaxResult<I::Item>
where I: Iterator,
F: FnMut(&I::Item) -> K,
L: FnMut(&I::Item, &I::Item, &K, &K) -> bool,
{
let (mut min, mut max, mut min_key, mut max_key) = match it.next() {
None => return MinMaxResult::NoElements,
Some(x) => {
match it.next() {
None => return MinMaxResult::OneElement(x),
Some(y) => {
let xk = key_for(&x);
let yk = key_for(&y);
if !lt(&y, &x, &yk, &xk) {(x, y, xk, yk)} else {(y, x, yk, xk)}
}
}
}
};
loop {
// `first` and `second` are the two next elements we want to look
// at. We first compare `first` and `second` (#1). The smaller one
// is then compared to current minimum (#2). The larger one is
// compared to current maximum (#3). This way we do 3 comparisons
// for 2 elements.
let first = match it.next() {
None => break,
Some(x) => x
};
let second = match it.next() {
None => {
let first_key = key_for(&first);
if lt(&first, &min, &first_key, &min_key) {
min = first;
} else if !lt(&first, &max, &first_key, &max_key) {
max = first;
}
break;
}
Some(x) => x
};
let first_key = key_for(&first);
let second_key = key_for(&second);
if !lt(&second, &first, &second_key, &first_key) {
if lt(&first, &min, &first_key, &min_key) {
min = first;
min_key = first_key;
}
if !lt(&second, &max, &second_key, &max_key) {
max = second;
max_key = second_key;
}
} else {
if lt(&second, &min, &second_key, &min_key) {
min = second;
min_key = second_key;
}
if !lt(&first, &max, &first_key, &max_key) {
max = first;
max_key = first_key;
}
}
}
MinMaxResult::MinMax(min, max)
}
|
{
match self {
MinMaxResult::NoElements => None,
MinMaxResult::OneElement(x) => Some((x.clone(), x)),
MinMaxResult::MinMax(x, y) => Some((x, y))
}
}
|
identifier_body
|
minmax.rs
|
/// `MinMaxResult` is an enum returned by `minmax`.
///
/// See [`.minmax()`](crate::Itertools::minmax) for more detail.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MinMaxResult<T> {
/// Empty iterator
NoElements,
/// Iterator with one element, so the minimum and maximum are the same
OneElement(T),
/// More than one element in the iterator; the first element is not larger
/// than the second
MinMax(T, T)
}
impl<T: Clone> MinMaxResult<T> {
/// `into_option` creates an `Option` of type `(T, T)`. The returned `Option`
/// has variant `None` if and only if the `MinMaxResult` has variant
/// `NoElements`. Otherwise `Some((x, y))` is returned where `x <= y`.
/// If the `MinMaxResult` has variant `OneElement(x)`, performing this
/// operation will make one clone of `x`.
///
/// # Examples
///
/// ```
/// use itertools::MinMaxResult::{self, NoElements, OneElement, MinMax};
///
/// let r: MinMaxResult<i32> = NoElements;
/// assert_eq!(r.into_option(), None);
///
/// let r = OneElement(1);
/// assert_eq!(r.into_option(), Some((1, 1)));
///
/// let r = MinMax(1, 2);
/// assert_eq!(r.into_option(), Some((1, 2)));
/// ```
pub fn into_option(self) -> Option<(T, T)> {
match self {
MinMaxResult::NoElements => None,
MinMaxResult::OneElement(x) => Some((x.clone(), x)),
MinMaxResult::MinMax(x, y) => Some((x, y))
}
}
}
/// Implementation guts for `minmax` and `minmax_by_key`.
pub fn minmax_impl<I, K, F, L>(mut it: I, mut key_for: F,
mut lt: L) -> MinMaxResult<I::Item>
where I: Iterator,
F: FnMut(&I::Item) -> K,
L: FnMut(&I::Item, &I::Item, &K, &K) -> bool,
{
let (mut min, mut max, mut min_key, mut max_key) = match it.next() {
None => return MinMaxResult::NoElements,
Some(x) => {
match it.next() {
None => return MinMaxResult::OneElement(x),
Some(y) => {
let xk = key_for(&x);
let yk = key_for(&y);
if !lt(&y, &x, &yk, &xk) {(x, y, xk, yk)} else {(y, x, yk, xk)}
}
}
}
};
loop {
// `first` and `second` are the two next elements we want to look
// at. We first compare `first` and `second` (#1). The smaller one
// is then compared to current minimum (#2). The larger one is
// compared to current maximum (#3). This way we do 3 comparisons
// for 2 elements.
let first = match it.next() {
None => break,
Some(x) => x
};
let second = match it.next() {
None => {
let first_key = key_for(&first);
if lt(&first, &min, &first_key, &min_key)
|
else if !lt(&first, &max, &first_key, &max_key) {
max = first;
}
break;
}
Some(x) => x
};
let first_key = key_for(&first);
let second_key = key_for(&second);
if !lt(&second, &first, &second_key, &first_key) {
if lt(&first, &min, &first_key, &min_key) {
min = first;
min_key = first_key;
}
if !lt(&second, &max, &second_key, &max_key) {
max = second;
max_key = second_key;
}
} else {
if lt(&second, &min, &second_key, &min_key) {
min = second;
min_key = second_key;
}
if !lt(&first, &max, &first_key, &max_key) {
max = first;
max_key = first_key;
}
}
}
MinMaxResult::MinMax(min, max)
}
|
{
min = first;
}
|
conditional_block
|
minmax.rs
|
/// `MinMaxResult` is an enum returned by `minmax`.
///
/// See [`.minmax()`](crate::Itertools::minmax) for more detail.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MinMaxResult<T> {
/// Empty iterator
NoElements,
/// Iterator with one element, so the minimum and maximum are the same
OneElement(T),
/// More than one element in the iterator; the first element is not larger
/// than the second
MinMax(T, T)
}
impl<T: Clone> MinMaxResult<T> {
/// `into_option` creates an `Option` of type `(T, T)`. The returned `Option`
/// has variant `None` if and only if the `MinMaxResult` has variant
/// `NoElements`. Otherwise `Some((x, y))` is returned where `x <= y`.
/// If the `MinMaxResult` has variant `OneElement(x)`, performing this
/// operation will make one clone of `x`.
///
/// # Examples
///
/// ```
/// use itertools::MinMaxResult::{self, NoElements, OneElement, MinMax};
///
/// let r: MinMaxResult<i32> = NoElements;
/// assert_eq!(r.into_option(), None);
///
/// let r = OneElement(1);
/// assert_eq!(r.into_option(), Some((1, 1)));
///
/// let r = MinMax(1, 2);
/// assert_eq!(r.into_option(), Some((1, 2)));
/// ```
pub fn into_option(self) -> Option<(T, T)> {
match self {
MinMaxResult::NoElements => None,
MinMaxResult::OneElement(x) => Some((x.clone(), x)),
MinMaxResult::MinMax(x, y) => Some((x, y))
}
}
}
/// Implementation guts for `minmax` and `minmax_by_key`.
pub fn
|
<I, K, F, L>(mut it: I, mut key_for: F,
mut lt: L) -> MinMaxResult<I::Item>
where I: Iterator,
F: FnMut(&I::Item) -> K,
L: FnMut(&I::Item, &I::Item, &K, &K) -> bool,
{
let (mut min, mut max, mut min_key, mut max_key) = match it.next() {
None => return MinMaxResult::NoElements,
Some(x) => {
match it.next() {
None => return MinMaxResult::OneElement(x),
Some(y) => {
let xk = key_for(&x);
let yk = key_for(&y);
if !lt(&y, &x, &yk, &xk) {(x, y, xk, yk)} else {(y, x, yk, xk)}
}
}
}
};
loop {
// `first` and `second` are the two next elements we want to look
// at. We first compare `first` and `second` (#1). The smaller one
// is then compared to current minimum (#2). The larger one is
// compared to current maximum (#3). This way we do 3 comparisons
// for 2 elements.
let first = match it.next() {
None => break,
Some(x) => x
};
let second = match it.next() {
None => {
let first_key = key_for(&first);
if lt(&first, &min, &first_key, &min_key) {
min = first;
} else if !lt(&first, &max, &first_key, &max_key) {
max = first;
}
break;
}
Some(x) => x
};
let first_key = key_for(&first);
let second_key = key_for(&second);
if !lt(&second, &first, &second_key, &first_key) {
if lt(&first, &min, &first_key, &min_key) {
min = first;
min_key = first_key;
}
if !lt(&second, &max, &second_key, &max_key) {
max = second;
max_key = second_key;
}
} else {
if lt(&second, &min, &second_key, &min_key) {
min = second;
min_key = second_key;
}
if !lt(&first, &max, &first_key, &max_key) {
max = first;
max_key = first_key;
}
}
}
MinMaxResult::MinMax(min, max)
}
|
minmax_impl
|
identifier_name
|
datastore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Cursor;
use std::io::Write;
use anyhow::format_err;
use anyhow::Result;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct Metadata {
pub size: Option<u64>,
pub flags: Option<u64>,
}
impl Metadata {
pub const LFS_FLAG: u64 = 0x2000;
/// Returns true if the blob retrieved from `DataStore::get` is an LFS pointer.
pub fn is_lfs(&self) -> bool {
match self.flags {
None => false,
Some(flag) => (flag & Metadata::LFS_FLAG) == Metadata::LFS_FLAG,
}
}
pub fn write<T: Write>(&self, writer: &mut T) -> Result<()> {
let mut buf = vec![];
if let Some(flags) = self.flags {
if flags != 0 {
Metadata::write_meta(b'f', flags, &mut buf)?;
}
}
if let Some(size) = self.size {
Metadata::write_meta(b's', size, &mut buf)?;
}
writer.write_u32::<BigEndian>(buf.len() as u32)?;
writer.write_all(buf.as_ref())?;
Ok(())
}
fn write_meta<T: Write>(flag: u8, value: u64, writer: &mut T) -> Result<()> {
writer.write_u8(flag as u8)?;
writer.write_u16::<BigEndian>(u64_to_bin_len(value))?;
u64_to_bin(value, writer)?;
Ok(())
}
pub fn read(cur: &mut Cursor<&[u8]>) -> Result<Metadata> {
let metadata_len = cur.read_u32::<BigEndian>()? as u64;
let mut size: Option<u64> = None;
let mut flags: Option<u64> = None;
let start_offset = cur.position();
while cur.position() < start_offset + metadata_len {
let key = cur.read_u8()?;
let value_len = cur.read_u16::<BigEndian>()? as usize;
match key {
b'f' => {
let buf = cur.get_ref();
flags = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
b's' => {
let buf = cur.get_ref();
size = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
_ => return Err(format_err!("invalid metadata format '{:?}'", key)),
}
let cur_pos = cur.position();
cur.set_position(cur_pos + value_len as u64);
}
Ok(Metadata { flags, size })
}
}
/// Precompute the size of a u64 when it is serialized
fn u64_to_bin_len(value: u64) -> u16 {
let mut value = value;
let mut count = 0;
while value > 0 {
count += 1;
value >>= 8;
}
count
}
/// Converts an integer into a buffer using the datapack format's variable-length big-endian encoding.
fn u64_to_bin<T: Write>(value: u64, writer: &mut T) -> Result<()> {
let mut value = value;
|
buf[pos] = value as u8;
value >>= 8;
}
assert!(value == 0 && pos == 0);
writer.write_all(&buf[0..len])?;
Ok(())
}
/// Converts a buffer back to an integer using the datapack format's variable-length big-endian encoding.
fn bin_to_u64(buf: &[u8]) -> u64 {
let mut n: u64 = 0;
for byte in buf.iter() {
n <<= 8;
n |= *byte as u64;
}
n
}
#[cfg(test)]
mod tests {
use quickcheck::quickcheck;
use super::*;
quickcheck! {
fn test_roundtrip_bin_to_u64(value: u64) -> bool {
let mut buf: Vec<u8> = vec![];
u64_to_bin(value, &mut buf).unwrap();
if buf.len() != u64_to_bin_len(value) as usize {
return false;
}
let new_value = bin_to_u64(&buf);
value == new_value
}
fn test_roundtrip_metadata(size: Option<u64>, flags: Option<u64>) -> bool {
let meta = Metadata { size, flags };
let mut buf: Vec<u8> = vec![];
meta.write(&mut buf).expect("write");
let read_meta = Metadata::read(&mut Cursor::new(&buf)).expect("read");
meta.size == read_meta.size && (meta.flags == read_meta.flags || meta.flags.map_or(false, |v| v == 0))
}
}
}
|
let mut buf = [0; 8];
let len = u64_to_bin_len(value) as usize;
let mut pos = len;
while value > 0 {
pos -= 1;
|
random_line_split
|
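`u64_to_bin` and `bin_to_u64` above implement a minimal-length big-endian integer encoding: only the significant bytes are written, most significant first. A self-contained sketch of the same scheme that returns a `Vec<u8>` instead of writing to a sink (hypothetical `encode`/`decode` names):

// Minimal-length big-endian encoding, as used by the datapack format above.
fn encode(value: u64) -> Vec<u8> {
    let mut out = Vec::new();
    let mut v = value;
    while v > 0 {
        out.push(v as u8); // least significant byte first...
        v >>= 8;
    }
    out.reverse(); // ...then reversed to big-endian order
    out
}

fn decode(buf: &[u8]) -> u64 {
    // Shift each byte in from the right, mirroring `bin_to_u64`.
    buf.iter().fold(0u64, |n, &b| (n << 8) | u64::from(b))
}

fn main() {
    let v = 0x1234_5678u64;
    let buf = encode(v);
    assert_eq!(buf.len(), 4); // only the 4 significant bytes
    assert_eq!(decode(&buf), v);
    assert!(encode(0).is_empty()); // zero needs no bytes at all
}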
datastore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Cursor;
use std::io::Write;
use anyhow::format_err;
use anyhow::Result;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct Metadata {
pub size: Option<u64>,
pub flags: Option<u64>,
}
impl Metadata {
pub const LFS_FLAG: u64 = 0x2000;
/// Returns true if the blob retrieved from `DataStore::get` is an LFS pointer.
pub fn is_lfs(&self) -> bool {
match self.flags {
None => false,
Some(flag) => (flag & Metadata::LFS_FLAG) == Metadata::LFS_FLAG,
}
}
pub fn write<T: Write>(&self, writer: &mut T) -> Result<()> {
let mut buf = vec![];
if let Some(flags) = self.flags {
if flags != 0 {
Metadata::write_meta(b'f', flags, &mut buf)?;
}
}
if let Some(size) = self.size {
Metadata::write_meta(b's', size, &mut buf)?;
}
writer.write_u32::<BigEndian>(buf.len() as u32)?;
writer.write_all(buf.as_ref())?;
Ok(())
}
fn write_meta<T: Write>(flag: u8, value: u64, writer: &mut T) -> Result<()> {
writer.write_u8(flag as u8)?;
writer.write_u16::<BigEndian>(u64_to_bin_len(value))?;
u64_to_bin(value, writer)?;
Ok(())
}
pub fn read(cur: &mut Cursor<&[u8]>) -> Result<Metadata> {
let metadata_len = cur.read_u32::<BigEndian>()? as u64;
let mut size: Option<u64> = None;
let mut flags: Option<u64> = None;
let start_offset = cur.position();
while cur.position() < start_offset + metadata_len {
let key = cur.read_u8()?;
let value_len = cur.read_u16::<BigEndian>()? as usize;
match key {
b'f' => {
let buf = cur.get_ref();
flags = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
b's' => {
let buf = cur.get_ref();
size = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
_ => return Err(format_err!("invalid metadata format '{:?}'", key)),
}
let cur_pos = cur.position();
cur.set_position(cur_pos + value_len as u64);
}
Ok(Metadata { flags, size })
}
}
/// Precompute the size of a u64 when it is serialized
fn
|
(value: u64) -> u16 {
let mut value = value;
let mut count = 0;
while value > 0 {
count += 1;
value >>= 8;
}
count
}
/// Converts an integer into a buffer using the datapack format's variable-length big-endian encoding.
fn u64_to_bin<T: Write>(value: u64, writer: &mut T) -> Result<()> {
let mut value = value;
let mut buf = [0; 8];
let len = u64_to_bin_len(value) as usize;
let mut pos = len;
while value > 0 {
pos -= 1;
buf[pos] = value as u8;
value >>= 8;
}
assert!(value == 0 && pos == 0);
writer.write_all(&buf[0..len])?;
Ok(())
}
/// Converts a buffer back to an integer using the datapack format's variable-length big-endian encoding.
fn bin_to_u64(buf: &[u8]) -> u64 {
let mut n: u64 = 0;
for byte in buf.iter() {
n <<= 8;
n |= *byte as u64;
}
n
}
#[cfg(test)]
mod tests {
use quickcheck::quickcheck;
use super::*;
quickcheck! {
fn test_roundtrip_bin_to_u64(value: u64) -> bool {
let mut buf: Vec<u8> = vec![];
u64_to_bin(value, &mut buf).unwrap();
if buf.len() != u64_to_bin_len(value) as usize {
return false;
}
let new_value = bin_to_u64(&buf);
value == new_value
}
fn test_roundtrip_metadata(size: Option<u64>, flags: Option<u64>) -> bool {
let meta = Metadata { size, flags };
let mut buf: Vec<u8> = vec![];
meta.write(&mut buf).expect("write");
let read_meta = Metadata::read(&mut Cursor::new(&buf)).expect("read");
meta.size == read_meta.size && (meta.flags == read_meta.flags || meta.flags.map_or(false, |v| v == 0))
}
}
}
|
u64_to_bin_len
|
identifier_name
|
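`Metadata::is_lfs` above treats the optional flag word as a bitset and reports whether the LFS bit (0x2000) is set. A free-standing sketch of that check (hypothetical standalone helper rather than a method):

// Bitset test for the LFS flag, mirroring `Metadata::is_lfs`.
const LFS_FLAG: u64 = 0x2000;

fn is_lfs(flags: Option<u64>) -> bool {
    flags.map_or(false, |f| f & LFS_FLAG == LFS_FLAG)
}

fn main() {
    assert!(is_lfs(Some(0x2000)));
    assert!(is_lfs(Some(0x2001))); // other bits may be set too
    assert!(!is_lfs(Some(0x1000)));
    assert!(!is_lfs(None)); // no flags means not an LFS pointer
}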
datastore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Cursor;
use std::io::Write;
use anyhow::format_err;
use anyhow::Result;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct Metadata {
pub size: Option<u64>,
pub flags: Option<u64>,
}
impl Metadata {
pub const LFS_FLAG: u64 = 0x2000;
/// Returns true if the blob retrieved from `DataStore::get` is an LFS pointer.
pub fn is_lfs(&self) -> bool {
match self.flags {
None => false,
Some(flag) => (flag & Metadata::LFS_FLAG) == Metadata::LFS_FLAG,
}
}
pub fn write<T: Write>(&self, writer: &mut T) -> Result<()> {
let mut buf = vec![];
if let Some(flags) = self.flags {
if flags != 0
|
}
if let Some(size) = self.size {
Metadata::write_meta(b's', size, &mut buf)?;
}
writer.write_u32::<BigEndian>(buf.len() as u32)?;
writer.write_all(buf.as_ref())?;
Ok(())
}
fn write_meta<T: Write>(flag: u8, value: u64, writer: &mut T) -> Result<()> {
writer.write_u8(flag as u8)?;
writer.write_u16::<BigEndian>(u64_to_bin_len(value))?;
u64_to_bin(value, writer)?;
Ok(())
}
pub fn read(cur: &mut Cursor<&[u8]>) -> Result<Metadata> {
let metadata_len = cur.read_u32::<BigEndian>()? as u64;
let mut size: Option<u64> = None;
let mut flags: Option<u64> = None;
let start_offset = cur.position();
while cur.position() < start_offset + metadata_len {
let key = cur.read_u8()?;
let value_len = cur.read_u16::<BigEndian>()? as usize;
match key {
b'f' => {
let buf = cur.get_ref();
flags = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
b's' => {
let buf = cur.get_ref();
size = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
_ => return Err(format_err!("invalid metadata format '{:?}'", key)),
}
let cur_pos = cur.position();
cur.set_position(cur_pos + value_len as u64);
}
Ok(Metadata { flags, size })
}
}
/// Precompute the size of a u64 when it is serialized
fn u64_to_bin_len(value: u64) -> u16 {
let mut value = value;
let mut count = 0;
while value > 0 {
count += 1;
value >>= 8;
}
count
}
/// Converts an integer into a buffer using the datapack format's variable-length big-endian encoding.
fn u64_to_bin<T: Write>(value: u64, writer: &mut T) -> Result<()> {
let mut value = value;
let mut buf = [0; 8];
let len = u64_to_bin_len(value) as usize;
let mut pos = len;
while value > 0 {
pos -= 1;
buf[pos] = value as u8;
value >>= 8;
}
assert!(value == 0 && pos == 0);
writer.write_all(&buf[0..len])?;
Ok(())
}
/// Converts a buffer back to an integer using the datapack format's variable-length big-endian encoding.
fn bin_to_u64(buf: &[u8]) -> u64 {
let mut n: u64 = 0;
for byte in buf.iter() {
n <<= 8;
n |= *byte as u64;
}
n
}
#[cfg(test)]
mod tests {
use quickcheck::quickcheck;
use super::*;
quickcheck! {
fn test_roundtrip_bin_to_u64(value: u64) -> bool {
let mut buf: Vec<u8> = vec![];
u64_to_bin(value, &mut buf).unwrap();
if buf.len() != u64_to_bin_len(value) as usize {
return false;
}
let new_value = bin_to_u64(&buf);
value == new_value
}
fn test_roundtrip_metadata(size: Option<u64>, flags: Option<u64>) -> bool {
let meta = Metadata { size, flags };
let mut buf: Vec<u8> = vec![];
meta.write(&mut buf).expect("write");
let read_meta = Metadata::read(&mut Cursor::new(&buf)).expect("read");
meta.size == read_meta.size && (meta.flags == read_meta.flags || meta.flags.map_or(false, |v| v == 0))
}
}
}
|
{
Metadata::write_meta(b'f', flags, &mut buf)?;
}
|
conditional_block
|
datastore.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Cursor;
use std::io::Write;
use anyhow::format_err;
use anyhow::Result;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct Metadata {
pub size: Option<u64>,
pub flags: Option<u64>,
}
impl Metadata {
pub const LFS_FLAG: u64 = 0x2000;
/// Returns true if the blob retrieved from `DataStore::get` is an LFS pointer.
pub fn is_lfs(&self) -> bool {
match self.flags {
None => false,
Some(flag) => (flag & Metadata::LFS_FLAG) == Metadata::LFS_FLAG,
}
}
pub fn write<T: Write>(&self, writer: &mut T) -> Result<()>
|
fn write_meta<T: Write>(flag: u8, value: u64, writer: &mut T) -> Result<()> {
writer.write_u8(flag as u8)?;
writer.write_u16::<BigEndian>(u64_to_bin_len(value))?;
u64_to_bin(value, writer)?;
Ok(())
}
pub fn read(cur: &mut Cursor<&[u8]>) -> Result<Metadata> {
let metadata_len = cur.read_u32::<BigEndian>()? as u64;
let mut size: Option<u64> = None;
let mut flags: Option<u64> = None;
let start_offset = cur.position();
while cur.position() < start_offset + metadata_len {
let key = cur.read_u8()?;
let value_len = cur.read_u16::<BigEndian>()? as usize;
match key {
b'f' => {
let buf = cur.get_ref();
flags = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
b's' => {
let buf = cur.get_ref();
size = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
_ => return Err(format_err!("invalid metadata format '{:?}'", key)),
}
let cur_pos = cur.position();
cur.set_position(cur_pos + value_len as u64);
}
Ok(Metadata { flags, size })
}
}
/// Precompute the size of a u64 when it is serialized
fn u64_to_bin_len(value: u64) -> u16 {
let mut value = value;
let mut count = 0;
while value > 0 {
count += 1;
value >>= 8;
}
count
}
/// Converts an integer into a buffer using the datapack format's variable-length big-endian encoding.
fn u64_to_bin<T: Write>(value: u64, writer: &mut T) -> Result<()> {
let mut value = value;
let mut buf = [0; 8];
let len = u64_to_bin_len(value) as usize;
let mut pos = len;
while value > 0 {
pos -= 1;
buf[pos] = value as u8;
value >>= 8;
}
assert!(value == 0 && pos == 0);
writer.write_all(&buf[0..len])?;
Ok(())
}
/// Converts a buffer back to an integer using the datapack format's variable-length big-endian encoding.
fn bin_to_u64(buf: &[u8]) -> u64 {
let mut n: u64 = 0;
for byte in buf.iter() {
n <<= 8;
n |= *byte as u64;
}
n
}
#[cfg(test)]
mod tests {
use quickcheck::quickcheck;
use super::*;
quickcheck! {
fn test_roundtrip_bin_to_u64(value: u64) -> bool {
let mut buf: Vec<u8> = vec![];
u64_to_bin(value, &mut buf).unwrap();
if buf.len() != u64_to_bin_len(value) as usize {
return false;
}
let new_value = bin_to_u64(&buf);
value == new_value
}
fn test_roundtrip_metadata(size: Option<u64>, flags: Option<u64>) -> bool {
let meta = Metadata { size, flags };
let mut buf: Vec<u8> = vec![];
meta.write(&mut buf).expect("write");
let read_meta = Metadata::read(&mut Cursor::new(&buf)).expect("read");
meta.size == read_meta.size && (meta.flags == read_meta.flags || meta.flags.map_or(false, |v| v == 0))
}
}
}
|
{
let mut buf = vec![];
if let Some(flags) = self.flags {
if flags != 0 {
Metadata::write_meta(b'f', flags, &mut buf)?;
}
}
if let Some(size) = self.size {
Metadata::write_meta(b's', size, &mut buf)?;
}
writer.write_u32::<BigEndian>(buf.len() as u32)?;
writer.write_all(buf.as_ref())?;
Ok(())
}
|
identifier_body
|
cartesian_rectangle.rs
|
use crate::geometry::Point;
pub struct
|
{
center: Point,
width: f64,
height: f64
}
impl CartesianRectangle {
pub fn new(center: Point, width: f64, height: f64) -> Self {
Self {
center,
width,
height
}
}
pub fn x_bounds(&self) -> Point {
Point::new(self.center.x - self.width / 2.0, self.center.x + self.width / 2.0)
}
pub fn y_bounds(&self) -> Point {
Point::new(self.center.y - self.height / 2.0, self.center.y + self.height / 2.0)
}
pub fn contains_point(&self, point: Point) -> bool {
let x_bounds = self.x_bounds();
let y_bounds = self.y_bounds();
point.x < x_bounds.y
&& point.x > x_bounds.x
&& point.y < y_bounds.y
&& point.y > y_bounds.x
}
}
|
CartesianRectangle
|
identifier_name
|
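`contains_point` above tests strict containment against bounds derived from the center, width, and height. An equivalent sketch that compares the distance from the center directly, with a stand-in `Point` so it runs on its own (hypothetical `Rect` type, not the crate's geometry module):

#[derive(Clone, Copy)]
struct Point { x: f64, y: f64 }

struct Rect { center: Point, width: f64, height: f64 }

impl Rect {
    // Strict inequalities, matching `contains_point` above: points exactly
    // on the boundary count as outside.
    fn contains(&self, p: Point) -> bool {
        (p.x - self.center.x).abs() < self.width / 2.0
            && (p.y - self.center.y).abs() < self.height / 2.0
    }
}

fn main() {
    let r = Rect { center: Point { x: 0.0, y: 0.0 }, width: 4.0, height: 2.0 };
    assert!(r.contains(Point { x: 1.0, y: 0.5 }));
    assert!(!r.contains(Point { x: 3.0, y: 0.0 })); // outside on the x axis
    assert!(!r.contains(Point { x: 2.0, y: 0.0 })); // exactly on the edge
}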
cartesian_rectangle.rs
|
use crate::geometry::Point;
pub struct CartesianRectangle {
center: Point,
width: f64,
height: f64
}
impl CartesianRectangle {
pub fn new(center: Point, width: f64, height: f64) -> Self {
Self {
center,
width,
height
}
}
pub fn x_bounds(&self) -> Point {
Point::new(self.center.x - self.width / 2.0, self.center.x + self.width / 2.0)
}
pub fn y_bounds(&self) -> Point {
Point::new(self.center.y - self.height / 2.0, self.center.y + self.height / 2.0)
}
pub fn contains_point(&self, point: Point) -> bool {
let x_bounds = self.x_bounds();
let y_bounds = self.y_bounds();
|
&& point.y > y_bounds.x
}
}
|
point.x < x_bounds.y
&& point.x > x_bounds.x
&& point.y < y_bounds.y
|
random_line_split
|
cartesian_rectangle.rs
|
use crate::geometry::Point;
pub struct CartesianRectangle {
center: Point,
width: f64,
height: f64
}
impl CartesianRectangle {
pub fn new(center: Point, width: f64, height: f64) -> Self {
Self {
center,
width,
height
}
}
pub fn x_bounds(&self) -> Point {
Point::new(self.center.x - self.width / 2.0, self.center.x + self.width / 2.0)
}
pub fn y_bounds(&self) -> Point {
Point::new(self.center.y - self.height / 2.0, self.center.y + self.height / 2.0)
}
pub fn contains_point(&self, point: Point) -> bool
|
}
|
{
let x_bounds = self.x_bounds();
let y_bounds = self.y_bounds();
point.x < x_bounds.y
&& point.x > x_bounds.x
&& point.y < y_bounds.y
&& point.y > y_bounds.x
}
|
identifier_body
|
cursor.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A list of common mouse cursors per CSS3-UI § 8.1.1.
use super::{CssWriter, ToCss};
macro_rules! define_cursor {
(
common properties = [
$( $c_css: expr => $c_variant: ident = $c_value: expr, )+
]
gecko properties = [
$( $g_css: expr => $g_variant: ident = $g_value: expr, )+
]
) => {
/// <https://drafts.csswg.org/css-ui/#cursor>
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, PartialEq)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[repr(u8)]
#[allow(missing_docs)]
pub enum CursorKind {
$( $c_variant = $c_value, )+
$( #[cfg(feature = "gecko")] $g_variant = $g_value, )+
}
impl CursorKind {
/// Given a CSS keyword, get the corresponding cursor enum.
pub fn from_css_keyword(keyword: &str) -> Result<Self, ()> {
match_ignore_ascii_case! { &keyword,
$( $c_css => Ok(CursorKind::$c_variant), )+
$( #[cfg(feature = "gecko")] $g_css => Ok(CursorKind::$g_variant), )+
_ => Err(())
}
}
/// From the C u8 value, get the corresponding Cursor enum.
pub fn from_u8(value: u8) -> Result<Self, ()> {
match value {
$( $c_value => Ok(CursorKind::$c_variant), )+
$( #[cfg(feature = "gecko")] $g_value => Ok(CursorKind::$g_variant), )+
_ => Err(())
}
}
}
impl ToCss for CursorKind {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> ::std::fmt::Result where W: ::std::fmt::Write {
match *self {
$(CursorKind::$c_variant => {
::std::fmt::Write::write_str(dest, $c_css)
})+
$(#[cfg(feature = "gecko")] CursorKind::$g_variant => {
::std::fmt::Write::write_str(dest, $g_css)
})+
}
}
}
}
}
define_cursor! {
common properties = [
"none" => None = 0,
"default" => Default = 1,
"pointer" => Pointer = 2,
"context-menu" => ContextMenu = 3,
"help" => Help = 4,
"progress" => Progress = 5,
"wait" => Wait = 6,
"cell" => Cell = 7,
"crosshair" => Crosshair = 8,
"text" => Text = 9,
"vertical-text" => VerticalText = 10,
"alias" => Alias = 11,
"copy" => Copy = 12,
"move" => Move = 13,
"no-drop" => NoDrop = 14,
"not-allowed" => NotAllowed = 15,
"grab" => Grab = 16,
"grabbing" => Grabbing = 17,
|
"s-resize" => SResize = 22,
"se-resize" => SeResize = 23,
"sw-resize" => SwResize = 24,
"w-resize" => WResize = 25,
"ew-resize" => EwResize = 26,
"ns-resize" => NsResize = 27,
"nesw-resize" => NeswResize = 28,
"nwse-resize" => NwseResize = 29,
"col-resize" => ColResize = 30,
"row-resize" => RowResize = 31,
"all-scroll" => AllScroll = 32,
"zoom-in" => ZoomIn = 33,
"zoom-out" => ZoomOut = 34,
"auto" => Auto = 35,
]
// Gecko-only properties
gecko properties = [
"-moz-grab" => MozGrab = 36,
"-moz-grabbing" => MozGrabbing = 37,
"-moz-zoom-in" => MozZoomIn = 38,
"-moz-zoom-out" => MozZoomOut = 39,
]
}
|
"e-resize" => EResize = 18,
"n-resize" => NResize = 19,
"ne-resize" => NeResize = 20,
"nw-resize" => NwResize = 21,
|
random_line_split
|
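The `define_cursor!` macro above expands into keyword-to-variant and u8-to-variant lookup tables. Written out by hand for a two-variant subset (hypothetical `Cursor` enum; plain `match` stands in for the crate's `match_ignore_ascii_case!`), the generated shape is roughly:

#[derive(Debug, PartialEq)]
#[repr(u8)]
enum Cursor {
    Default = 1,
    Pointer = 2,
}

impl Cursor {
    // Keyword lookup; the real macro matches ASCII case-insensitively.
    fn from_css_keyword(keyword: &str) -> Result<Self, ()> {
        match keyword {
            "default" => Ok(Cursor::Default),
            "pointer" => Ok(Cursor::Pointer),
            _ => Err(()),
        }
    }

    // Reverse lookup from the C-side u8 value.
    fn from_u8(value: u8) -> Result<Self, ()> {
        match value {
            1 => Ok(Cursor::Default),
            2 => Ok(Cursor::Pointer),
            _ => Err(()),
        }
    }
}

fn main() {
    assert_eq!(Cursor::from_css_keyword("pointer"), Ok(Cursor::Pointer));
    assert_eq!(Cursor::from_u8(1), Ok(Cursor::Default));
    assert!(Cursor::from_u8(99).is_err());
}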
method-self-arg-trait.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test method calls with self as an argument
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
#[derive(Copy)]
struct Foo;
trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self)
|
fn foo2(self) {
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
}
}
impl Foo {
fn baz(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
fn main() {
let x = Foo;
// Test external call.
Bar::foo1(&x);
Bar::foo2(x);
Bar::foo3(box x);
Bar::bar1(&x);
Bar::bar2(x);
Bar::bar3(box x);
x.baz();
unsafe { assert!(COUNT == 2u64*2*3*3*5*5*7*7*11*11*13*13*17); }
}
|
{
unsafe { COUNT *= 2; }
}
|
identifier_body
|
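The test above exercises the three method receiver forms: `&self`, by-value `self`, and `self: Box<Self>`, called through fully qualified `Bar::fooN(...)` syntax. A modern sketch of the same shapes using stable `Box::new` in place of the long-removed `box` syntax:

trait Bar: Sized {
    fn by_ref(&self) -> &'static str { "&self" }
    fn by_value(self) -> &'static str { "self" }
    fn by_box(self: Box<Self>) -> &'static str { "Box<Self>" }
}

#[derive(Clone, Copy)]
struct Foo;
impl Bar for Foo {}

fn main() {
    let x = Foo;
    // Fully qualified calls, mirroring the test's `Bar::foo1(&x)` style.
    assert_eq!(Bar::by_ref(&x), "&self");
    assert_eq!(Bar::by_value(x), "self");
    assert_eq!(Bar::by_box(Box::new(x)), "Box<Self>");
}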
method-self-arg-trait.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test method calls with self as an argument
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
#[derive(Copy)]
struct Foo;
trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self) {
unsafe { COUNT *= 2; }
|
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
}
}
impl Foo {
fn baz(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
fn main() {
let x = Foo;
// Test external call.
Bar::foo1(&x);
Bar::foo2(x);
Bar::foo3(box x);
Bar::bar1(&x);
Bar::bar2(x);
Bar::bar3(box x);
x.baz();
unsafe { assert!(COUNT == 2u64*2*3*3*5*5*7*7*11*11*13*13*17); }
}
|
}
fn foo2(self) {
|
random_line_split
|
method-self-arg-trait.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test method calls with self as an argument
#![allow(unknown_features)]
#![feature(box_syntax)]
static mut COUNT: u64 = 1;
#[derive(Copy)]
struct Foo;
trait Bar : Sized {
fn foo1(&self);
fn foo2(self);
fn foo3(self: Box<Self>);
fn bar1(&self) {
unsafe { COUNT *= 7; }
}
fn bar2(self) {
unsafe { COUNT *= 11; }
}
fn bar3(self: Box<Self>) {
unsafe { COUNT *= 13; }
}
}
impl Bar for Foo {
fn foo1(&self) {
unsafe { COUNT *= 2; }
}
fn
|
(self) {
unsafe { COUNT *= 3; }
}
fn foo3(self: Box<Foo>) {
unsafe { COUNT *= 5; }
}
}
impl Foo {
fn baz(self) {
unsafe { COUNT *= 17; }
// Test internal call.
Bar::foo1(&self);
Bar::foo2(self);
Bar::foo3(box self);
Bar::bar1(&self);
Bar::bar2(self);
Bar::bar3(box self);
}
}
fn main() {
let x = Foo;
// Test external call.
Bar::foo1(&x);
Bar::foo2(x);
Bar::foo3(box x);
Bar::bar1(&x);
Bar::bar2(x);
Bar::bar3(box x);
x.baz();
unsafe { assert!(COUNT == 2u64*2*3*3*5*5*7*7*11*11*13*13*17); }
}
|
foo2
|
identifier_name
|
tag-variant-disr-dup.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//error-pattern:discriminator value already exists
// black and white have the same discriminator value...
|
red = 0xff0000,
green = 0x00ff00,
blue = 0x0000ff,
black = 0x000000,
white = 0x000000,
}
|
enum color {
|
random_line_split
|
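The compile-fail test above relies on `black` and `white` sharing the discriminant 0x000000, which the compiler rejects. For contrast, a sketch of the same enum with distinct discriminants, which compiles (the 0xffffff value for white is an assumption for illustration):

#[derive(Debug)]
enum Color {
    Red = 0xff0000,
    Green = 0x00ff00,
    Blue = 0x0000ff,
    Black = 0x000000,
    White = 0xffffff, // distinct from Black, so no duplicate-discriminant error
}

fn main() {
    assert_eq!(Color::White as u32, 0xffffff);
    assert_ne!(Color::Black as u32, Color::White as u32);
}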
tag-variant-disr-dup.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//error-pattern:discriminator value already exists
// black and white have the same discriminator value...
enum
|
{
red = 0xff0000,
green = 0x00ff00,
blue = 0x0000ff,
black = 0x000000,
white = 0x000000,
}
|
color
|
identifier_name
|
iter.rs
|
/*
* iter.rs: Iterator implementation for rbtree.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Credit where credit is due!
// https://codereview.stackexchange.com/questions/110161/binary-trees-in-rust-iterators
use super::rbtree_wrapper::{Augment, RBTree};
/// Iterator for [RBTree]
pub struct TreeIterator<K: Ord + Copy, A: Copy, V> {
right: Vec<RBTree<K, A, V>>,
current: Option<RBTree<K, A, V>>,
}
impl<K: Ord + Copy, A: Copy, V> TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
pub(crate) fn new(root: RBTree<K, A, V>) -> TreeIterator<K, A, V> {
let mut iter = TreeIterator { right: vec![], current: None };
iter.add_subtree(root);
iter
}
fn add_subtree(&mut self, root: RBTree<K, A, V>) {
let mut node: RBTree<K, A, V> = root;
while node.is_node() {
if node.right_ref().is_node() {
self.right.push(node.right());
}
if node.left_ref().is_node() {
let tmp = node.left();
self.right.push(node);
node = tmp;
} else {
break;
}
}
self.current = if node.is_node() { Some(node) } else { None };
}
}
impl<K: Ord + Copy, A: Copy, V> Iterator for TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
type Item = (K, A, V);
fn next(&mut self) -> Option<(K, A, V)> {
let result;
if let Some(node) = self.current.take()
|
else {
return None;
}
if let Some(node) = self.right.pop() {
self.add_subtree(node);
}
result
}
}
|
{
result = Some((node.key(), node.aug_data(), node.data()));
}
|
conditional_block
|
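`TreeIterator` above walks the tree in order by pushing the spine of left ancestors (and pending right subtrees) onto an explicit stack. The same technique on a bare-bones binary tree (hypothetical `Node` type, not the rbtree wrapper):

struct Node {
    key: i32,
    left: Option<Box<Node>>,
    right: Option<Box<Node>>,
}

// In-order traversal with an explicit stack instead of recursion.
fn in_order(root: Option<&Node>, out: &mut Vec<i32>) {
    let mut stack: Vec<&Node> = Vec::new();
    let mut cur = root;
    while cur.is_some() || !stack.is_empty() {
        // Descend left, remembering every ancestor on the way down.
        while let Some(n) = cur {
            stack.push(n);
            cur = n.left.as_deref();
        }
        // Visit the deepest pending node, then explore its right subtree.
        let n = stack.pop().expect("stack is non-empty here");
        out.push(n.key);
        cur = n.right.as_deref();
    }
}

fn main() {
    let tree = Node {
        key: 2,
        left: Some(Box::new(Node { key: 1, left: None, right: None })),
        right: Some(Box::new(Node { key: 3, left: None, right: None })),
    };
    let mut keys = Vec::new();
    in_order(Some(&tree), &mut keys);
    assert_eq!(keys, vec![1, 2, 3]);
}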
iter.rs
|
/*
* iter.rs: Iterator implementation for rbtree.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Credit where credit is due!
// https://codereview.stackexchange.com/questions/110161/binary-trees-in-rust-iterators
use super::rbtree_wrapper::{Augment, RBTree};
/// Iterator for [RBTree]
pub struct TreeIterator<K: Ord + Copy, A: Copy, V> {
right: Vec<RBTree<K, A, V>>,
current: Option<RBTree<K, A, V>>,
}
impl<K: Ord + Copy, A: Copy, V> TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
pub(crate) fn new(root: RBTree<K, A, V>) -> TreeIterator<K, A, V> {
let mut iter = TreeIterator { right: vec![], current: None };
iter.add_subtree(root);
iter
}
fn add_subtree(&mut self, root: RBTree<K, A, V>) {
let mut node: RBTree<K, A, V> = root;
while node.is_node() {
if node.right_ref().is_node() {
self.right.push(node.right());
}
if node.left_ref().is_node() {
let tmp = node.left();
self.right.push(node);
node = tmp;
} else {
break;
}
}
self.current = if node.is_node() { Some(node) } else { None };
}
}
impl<K: Ord + Copy, A: Copy, V> Iterator for TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
type Item = (K, A, V);
fn next(&mut self) -> Option<(K, A, V)>
|
}
|
{
let result;
if let Some(node) = self.current.take() {
result = Some((node.key(), node.aug_data(), node.data()));
} else {
return None;
}
if let Some(node) = self.right.pop() {
self.add_subtree(node);
}
result
}
|
identifier_body
|
iter.rs
|
/*
* iter.rs: Iterator implementation for rbtree.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Credit where credit is due!
// https://codereview.stackexchange.com/questions/110161/binary-trees-in-rust-iterators
use super::rbtree_wrapper::{Augment, RBTree};
/// Iterator for [RBTree]
pub struct TreeIterator<K: Ord + Copy, A: Copy, V> {
right: Vec<RBTree<K, A, V>>,
current: Option<RBTree<K, A, V>>,
}
impl<K: Ord + Copy, A: Copy, V> TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
pub(crate) fn new(root: RBTree<K, A, V>) -> TreeIterator<K, A, V> {
let mut iter = TreeIterator { right: vec![], current: None };
iter.add_subtree(root);
iter
}
fn
|
(&mut self, root: RBTree<K, A, V>) {
let mut node: RBTree<K, A, V> = root;
while node.is_node() {
if node.right_ref().is_node() {
self.right.push(node.right());
}
if node.left_ref().is_node() {
let tmp = node.left();
self.right.push(node);
node = tmp;
} else {
break;
}
}
self.current = if node.is_node() { Some(node) } else { None };
}
}
impl<K: Ord + Copy, A: Copy, V> Iterator for TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
type Item = (K, A, V);
fn next(&mut self) -> Option<(K, A, V)> {
let result;
if let Some(node) = self.current.take() {
result = Some((node.key(), node.aug_data(), node.data()));
} else {
return None;
}
if let Some(node) = self.right.pop() {
self.add_subtree(node);
}
result
}
}
|
add_subtree
|
identifier_name
|
iter.rs
|
/*
* iter.rs: Iterator implementation for rbtree.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Credit where credit is due!
// https://codereview.stackexchange.com/questions/110161/binary-trees-in-rust-iterators
use super::rbtree_wrapper::{Augment, RBTree};
/// Iterator for [RBTree]
pub struct TreeIterator<K: Ord + Copy, A: Copy, V> {
right: Vec<RBTree<K, A, V>>,
current: Option<RBTree<K, A, V>>,
}
impl<K: Ord + Copy, A: Copy, V> TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
pub(crate) fn new(root: RBTree<K, A, V>) -> TreeIterator<K, A, V> {
let mut iter = TreeIterator { right: vec![], current: None };
iter.add_subtree(root);
iter
}
fn add_subtree(&mut self, root: RBTree<K, A, V>) {
let mut node: RBTree<K, A, V> = root;
while node.is_node() {
if node.right_ref().is_node() {
self.right.push(node.right());
}
if node.left_ref().is_node() {
let tmp = node.left();
self.right.push(node);
node = tmp;
} else {
break;
}
}
self.current = if node.is_node() { Some(node) } else { None };
}
}
impl<K: Ord + Copy, A: Copy, V> Iterator for TreeIterator<K, A, V>
where
RBTree<K, A, V>: Augment<A>,
{
type Item = (K, A, V);
fn next(&mut self) -> Option<(K, A, V)> {
|
return None;
}
if let Some(node) = self.right.pop() {
self.add_subtree(node);
}
result
}
}
|
let result;
if let Some(node) = self.current.take() {
result = Some((node.key(), node.aug_data(), node.data()));
} else {
|
random_line_split
|
interface.rs
|
use std::io::{self, Write, BufWriter};
use super::{MatchWithPositions, match_and_score_with_positions};
use ansi::{clear, color, cursor, style};
use terminal::{self, Terminal, Key, Event};
use rayon::prelude::*;
#[derive(Debug)]
pub enum Error {
Exit,
Write(io::Error),
Reset(terminal::Error)
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Write(err)
}
}
impl From<terminal::Error> for Error {
fn from(err: terminal::Error) -> Error {
Error::Reset(err)
}
}
pub struct Interface<'a> {
lines: &'a [String],
matches: Vec<MatchWithPositions<'a>>,
search: String,
selected: usize,
choices_width: usize,
width: usize,
terminal: Terminal,
}
impl<'a> Interface<'a> {
// Creates a new Interface with the provided lines
pub fn new(lines: &'a [String]) -> Interface<'a> {
let mut terminal = Terminal::from("/dev/tty").unwrap();
let choices_width = format!("{}", lines.len()).len();
terminal.set_raw_mode().unwrap();
Interface {
lines: lines,
matches: vec![],
search: String::new(),
selected: 0,
choices_width: choices_width,
width: terminal.max_width,
terminal: terminal,
}
}
// Runs the Interface, returning either the final selection or an error
pub fn run(&mut self) -> Result<&str, Error> {
self.filter_matches();
self.render()?;
for event in self.terminal.events()? {
if let Event::Key(key) = event? {
match key {
Key::Ctrl('c') | Key::Ctrl('d') | Key::Escape => {
self.reset()?;
return Err(Error::Exit);
}
Key::Char('\n') => {
break;
},
Key::Ctrl('n') => {
self.selected += 1;
self.render()?;
},
Key::Ctrl('p') => {
self.selected = self.selected.saturating_sub(1);
self.render()?;
},
Key::Char(ch) => {
self.search.push(ch);
self.filter_existing();
self.render()?;
},
Key::Backspace | Key::Ctrl('h') => {
self.search.pop();
self.filter_matches();
self.render()?;
}
Key::Ctrl('u') => {
self.search.clear();
self.filter_matches();
self.render()?;
}
_ => {}
}
};
}
self.reset()?;
Ok(self.result())
}
// Matches and scores `lines` by `search`, sorting the result
fn filter_matches(&mut self) {
let ref search = self.search;
self.matches = self.lines.
par_iter().
filter_map(|line| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Matches and scores the existing `matches` by `search`, sorting the result
fn filter_existing(&mut self) {
let ref search = self.search;
self.matches = self.matches.
par_iter().
filter_map(|&(line, _, _)| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Renders the current state of the Interface to its `terminal`
fn render(&mut self) -> io::Result<()> {
self.clamp_selected();
let prompt = self.prompt();
let matches = self.matches.iter().take(10);
let n = matches.len() as u16;
let mut term = BufWriter::new(&mut self.terminal);
write!(term, "{}{}{}", cursor::Column(1), clear::Screen, prompt)?;
for (i, choice) in matches.enumerate() {
let selected = i == self.selected;
let chars = choice.0.chars().take(self.width);
write!(term, "\r\n")?;
if selected {
write!(term, "{}", style::Invert)?;
}
let ref positions = choice.2;
for (i, ch) in chars.enumerate() {
if positions.contains(&i) {
let color = color::Fg(color::Colors::Magenta);
let reset = color::Fg(color::Reset);
write!(term, "{}{}{}", color, ch, reset)?;
} else {
write!(term, "{}", ch)?;
}
}
if selected {
write!(term, "{}", style::NoInvert)?;
}
}
if n > 0 {
let col = (prompt.len() + 1) as u16;
write!(term, "{}{}", cursor::Up(n), cursor::Column(col))?;
}
Ok(())
}
// Generates the input prompt
fn prompt(&self) -> String {
let count = self.matches.len();
format!("{:width$} > {}", count, self.search, width = self.choices_width)
}
// Clamps `selected`, such that it doesn't overflow the matches length
fn clamp_selected(&mut self) {
let mut max = self.matches.len();
if max > 10 { max = 10; }
if self.selected >= max {
self.selected = if max > 0 { max - 1 } else { 0 };
}
}
// Resets the `terminal`
fn reset(&mut self) -> Result<(), Error> {
write!(self.terminal, "{}{}", cursor::Column(1), clear::Screen)?;
self.terminal.reset()?;
Ok(())
}
fn result(&mut self) -> &str
|
}
|
{
self.matches.iter().
nth(self.selected).
map(|choice| choice.0).
unwrap_or(&self.search)
}
|
identifier_body
|
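`clamp_selected` above keeps the cursor inside the visible window: at most ten matches are shown, and the selection may never point past the last one. The rule in isolation (hypothetical free function):

// Clamp a selection index to the visible match window (at most 10 rows).
fn clamp_selected(selected: usize, num_matches: usize) -> usize {
    let visible = num_matches.min(10);
    if visible == 0 { 0 } else { selected.min(visible - 1) }
}

fn main() {
    assert_eq!(clamp_selected(5, 3), 2);   // clamped to the last of 3 matches
    assert_eq!(clamp_selected(12, 50), 9); // only 10 rows are ever visible
    assert_eq!(clamp_selected(4, 0), 0);   // nothing to select
}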
interface.rs
|
use std::io::{self, Write, BufWriter};
use super::{MatchWithPositions, match_and_score_with_positions};
use ansi::{clear, color, cursor, style};
use terminal::{self, Terminal, Key, Event};
use rayon::prelude::*;
#[derive(Debug)]
pub enum Error {
Exit,
Write(io::Error),
Reset(terminal::Error)
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Write(err)
}
}
impl From<terminal::Error> for Error {
fn from(err: terminal::Error) -> Error {
Error::Reset(err)
}
}
pub struct Interface<'a> {
lines: &'a [String],
matches: Vec<MatchWithPositions<'a>>,
search: String,
selected: usize,
choices_width: usize,
width: usize,
terminal: Terminal,
}
impl<'a> Interface<'a> {
// Creates a new Interface with the provided lines
pub fn new(lines: &'a [String]) -> Interface<'a> {
let mut terminal = Terminal::from("/dev/tty").unwrap();
let choices_width = format!("{}", lines.len()).len();
terminal.set_raw_mode().unwrap();
Interface {
lines: lines,
matches: vec![],
search: String::new(),
selected: 0,
choices_width: choices_width,
width: terminal.max_width,
terminal: terminal,
}
}
// Runs the Interface, returning either the final selection or an error
pub fn run(&mut self) -> Result<&str, Error> {
self.filter_matches();
self.render()?;
for event in self.terminal.events()? {
if let Event::Key(key) = event? {
match key {
Key::Ctrl('c') | Key::Ctrl('d') | Key::Escape => {
self.reset()?;
return Err(Error::Exit);
}
Key::Char('\n') => {
break;
},
Key::Ctrl('n') => {
self.selected += 1;
self.render()?;
},
Key::Ctrl('p') => {
self.selected = self.selected.saturating_sub(1);
self.render()?;
},
Key::Char(ch) => {
self.search.push(ch);
self.filter_existing();
self.render()?;
},
Key::Backspace | Key::Ctrl('h') => {
self.search.pop();
self.filter_matches();
self.render()?;
}
Key::Ctrl('u') => {
self.search.clear();
self.filter_matches();
self.render()?;
}
_ => {}
}
};
}
self.reset()?;
Ok(self.result())
}
// Matches and scores `lines` by `search`, sorting the result
fn
|
(&mut self) {
let ref search = self.search;
self.matches = self.lines.
par_iter().
filter_map(|line| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Matches and scores the existing `matches` by `search`, sorting the result
fn filter_existing(&mut self) {
let ref search = self.search;
self.matches = self.matches.
par_iter().
filter_map(|&(line, _, _)| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Renders the current state of the Interface to its `terminal`
fn render(&mut self) -> io::Result<()> {
self.clamp_selected();
let prompt = self.prompt();
let matches = self.matches.iter().take(10);
let n = matches.len() as u16;
let mut term = BufWriter::new(&mut self.terminal);
write!(term, "{}{}{}", cursor::Column(1), clear::Screen, prompt)?;
for (i, choice) in matches.enumerate() {
let selected = i == self.selected;
let chars = choice.0.chars().take(self.width);
write!(term, "\r\n")?;
if selected {
write!(term, "{}", style::Invert)?;
}
let ref positions = choice.2;
for (i, ch) in chars.enumerate() {
if positions.contains(&i) {
let color = color::Fg(color::Colors::Magenta);
let reset = color::Fg(color::Reset);
write!(term, "{}{}{}", color, ch, reset)?;
} else {
write!(term, "{}", ch)?;
}
}
if selected {
write!(term, "{}", style::NoInvert)?;
}
}
if n > 0 {
let col = (prompt.len() + 1) as u16;
write!(term, "{}{}", cursor::Up(n), cursor::Column(col))?;
}
Ok(())
}
// Generates the input prompt
fn prompt(&self) -> String {
let count = self.matches.len();
format!("{:width$} > {}", count, self.search, width = self.choices_width)
}
// Clamps `selected` so that it doesn't overflow the number of visible matches
fn clamp_selected(&mut self) {
let mut max = self.matches.len();
if max > 10 { max = 10; }
if self.selected >= max {
self.selected = if max > 0 { max - 1 } else { 0 };
}
}
// Resets the `terminal`
fn reset(&mut self) -> Result<(), Error> {
write!(self.terminal, "{}{}", cursor::Column(1), clear::Screen)?;
self.terminal.reset()?;
Ok(())
}
fn result(&mut self) -> &str {
self.matches.iter().
nth(self.selected).
map(|choice| choice.0).
unwrap_or(&self.search)
}
}
|
filter_matches
|
identifier_name
|
interface.rs
|
use std::io::{self, Write, BufWriter};
use super::{MatchWithPositions, match_and_score_with_positions};
use ansi::{clear, color, cursor, style};
use terminal::{self, Terminal, Key, Event};
use rayon::prelude::*;
#[derive(Debug)]
pub enum Error {
Exit,
Write(io::Error),
Reset(terminal::Error)
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Write(err)
}
}
impl From<terminal::Error> for Error {
fn from(err: terminal::Error) -> Error {
Error::Reset(err)
}
}
pub struct Interface<'a> {
lines: &'a [String],
matches: Vec<MatchWithPositions<'a>>,
search: String,
selected: usize,
choices_width: usize,
width: usize,
terminal: Terminal,
}
impl<'a> Interface<'a> {
// Creates a new Interface with the provided lines
pub fn new(lines: &'a [String]) -> Interface<'a> {
let mut terminal = Terminal::from("/dev/tty").unwrap();
let choices_width = format!("{}", lines.len()).len();
terminal.set_raw_mode().unwrap();
Interface {
lines: lines,
matches: vec![],
search: String::new(),
selected: 0,
choices_width: choices_width,
width: terminal.max_width,
terminal: terminal,
}
}
// Runs the Interface, returning either the final selection or an error
pub fn run(&mut self) -> Result<&str, Error> {
self.filter_matches();
self.render()?;
for event in self.terminal.events()? {
if let Event::Key(key) = event? {
match key {
Key::Ctrl('c') | Key::Ctrl('d') | Key::Escape => {
self.reset()?;
return Err(Error::Exit);
}
Key::Char('\n') => {
break;
},
Key::Ctrl('n') => {
self.selected += 1;
self.render()?;
},
Key::Ctrl('p') => {
self.selected = self.selected.saturating_sub(1);
self.render()?;
},
Key::Char(ch) => {
self.search.push(ch);
self.filter_existing();
|
self.render()?;
},
Key::Backspace | Key::Ctrl('h') => {
self.search.pop();
self.filter_matches();
self.render()?;
}
Key::Ctrl('u') => {
self.search.clear();
self.filter_matches();
self.render()?;
}
_ => {}
}
};
}
self.reset()?;
Ok(self.result())
}
// Matches and scores `lines` by `search`, sorting the result
fn filter_matches(&mut self) {
let ref search = self.search;
self.matches = self.lines.
par_iter().
filter_map(|line| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Matches and scores the existing `matches` by `search`, sorting the result
fn filter_existing(&mut self) {
let ref search = self.search;
self.matches = self.matches.
par_iter().
filter_map(|&(line, _, _)| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Renders the current state of the Interface to its `terminal`
fn render(&mut self) -> io::Result<()> {
self.clamp_selected();
let prompt = self.prompt();
let matches = self.matches.iter().take(10);
let n = matches.len() as u16;
let mut term = BufWriter::new(&mut self.terminal);
write!(term, "{}{}{}", cursor::Column(1), clear::Screen, prompt)?;
for (i, choice) in matches.enumerate() {
let selected = i == self.selected;
let chars = choice.0.chars().take(self.width);
write!(term, "\r\n")?;
if selected {
write!(term, "{}", style::Invert)?;
}
let ref positions = choice.2;
for (i, ch) in chars.enumerate() {
if positions.contains(&i) {
let color = color::Fg(color::Colors::Magenta);
let reset = color::Fg(color::Reset);
write!(term, "{}{}{}", color, ch, reset)?;
} else {
write!(term, "{}", ch)?;
}
}
if selected {
write!(term, "{}", style::NoInvert)?;
}
}
if n > 0 {
let col = (prompt.len() + 1) as u16;
write!(term, "{}{}", cursor::Up(n), cursor::Column(col))?;
}
Ok(())
}
// Generates the input prompt
fn prompt(&self) -> String {
let count = self.matches.len();
format!("{:width$} > {}", count, self.search, width = self.choices_width)
}
// Clamps `selected` so that it doesn't overflow the number of visible matches
fn clamp_selected(&mut self) {
let mut max = self.matches.len();
if max > 10 { max = 10; }
if self.selected >= max {
self.selected = if max > 0 { max - 1 } else { 0 };
}
}
// Resets the `terminal`
fn reset(&mut self) -> Result<(), Error> {
write!(self.terminal, "{}{}", cursor::Column(1), clear::Screen)?;
self.terminal.reset()?;
Ok(())
}
fn result(&mut self) -> &str {
self.matches.iter().
nth(self.selected).
map(|choice| choice.0).
unwrap_or(&self.search)
}
}
|
random_line_split
|
|
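One detail of prompt worth isolating: {:width$} reads the pad width from a named runtime argument, which is how the match count stays right-aligned to the digit width of the total line count. A quick standalone check of that formatting behavior:

fn main() {
    // choices_width would be 3 when there are 100..=999 input lines.
    let (count, choices_width) = (7, 3);
    let prompt = format!("{:width$} > {}", count, "se", width = choices_width);
    assert_eq!(prompt, "  7 > se");
}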
interface.rs
|
use std::io::{self, Write, BufWriter};
use super::{MatchWithPositions, match_and_score_with_positions};
use ansi::{clear, color, cursor, style};
use terminal::{self, Terminal, Key, Event};
use rayon::prelude::*;
#[derive(Debug)]
pub enum Error {
Exit,
Write(io::Error),
Reset(terminal::Error)
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Write(err)
}
}
impl From<terminal::Error> for Error {
fn from(err: terminal::Error) -> Error {
Error::Reset(err)
}
}
pub struct Interface<'a> {
lines: &'a [String],
matches: Vec<MatchWithPositions<'a>>,
search: String,
selected: usize,
choices_width: usize,
width: usize,
terminal: Terminal,
}
impl<'a> Interface<'a> {
// Creates a new Interface with the provided lines
pub fn new(lines: &'a [String]) -> Interface<'a> {
let mut terminal = Terminal::from("/dev/tty").unwrap();
let choices_width = format!("{}", lines.len()).len();
terminal.set_raw_mode().unwrap();
Interface {
lines: lines,
matches: vec![],
search: String::new(),
selected: 0,
choices_width: choices_width,
width: terminal.max_width,
terminal: terminal,
}
}
// Runs the Interface, returning either the final selection or an error
pub fn run(&mut self) -> Result<&str, Error> {
self.filter_matches();
self.render()?;
for event in self.terminal.events()? {
if let Event::Key(key) = event? {
match key {
Key::Ctrl('c') | Key::Ctrl('d') | Key::Escape => {
self.reset()?;
return Err(Error::Exit);
}
Key::Char('\n') =>
|
,
Key::Ctrl('n') => {
self.selected += 1;
self.render()?;
},
Key::Ctrl('p') => {
self.selected = self.selected.saturating_sub(1);
self.render()?;
},
Key::Char(ch) => {
self.search.push(ch);
self.filter_existing();
self.render()?;
},
Key::Backspace | Key::Ctrl('h') => {
self.search.pop();
self.filter_matches();
self.render()?;
}
Key::Ctrl('u') => {
self.search.clear();
self.filter_matches();
self.render()?;
}
_ => {}
}
};
}
self.reset()?;
Ok(self.result())
}
// Matches and scores `lines` by `search`, sorting the result
fn filter_matches(&mut self) {
let ref search = self.search;
self.matches = self.lines.
par_iter().
filter_map(|line| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Matches and scores the existing `matches` by `search`, sorting the result
fn filter_existing(&mut self) {
let ref search = self.search;
self.matches = self.matches.
par_iter().
filter_map(|&(line, _, _)| match_and_score_with_positions(search, line)).
collect();
self.matches.par_sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
}
// Renders the current state of the Interface to its `terminal`
fn render(&mut self) -> io::Result<()> {
self.clamp_selected();
let prompt = self.prompt();
let matches = self.matches.iter().take(10);
let n = matches.len() as u16;
let mut term = BufWriter::new(&mut self.terminal);
write!(term, "{}{}{}", cursor::Column(1), clear::Screen, prompt)?;
for (i, choice) in matches.enumerate() {
let selected = i == self.selected;
let chars = choice.0.chars().take(self.width);
write!(term, "\r\n")?;
if selected {
write!(term, "{}", style::Invert)?;
}
let ref positions = choice.2;
for (i, ch) in chars.enumerate() {
if positions.contains(&i) {
let color = color::Fg(color::Colors::Magenta);
let reset = color::Fg(color::Reset);
write!(term, "{}{}{}", color, ch, reset)?;
} else {
write!(term, "{}", ch)?;
}
}
if selected {
write!(term, "{}", style::NoInvert)?;
}
}
if n > 0 {
let col = (prompt.len() + 1) as u16;
write!(term, "{}{}", cursor::Up(n), cursor::Column(col))?;
}
Ok(())
}
// Generates the input prompt
fn prompt(&self) -> String {
let count = self.matches.len();
format!("{:width$} > {}", count, self.search, width = self.choices_width)
}
// Clamps `selected` so that it doesn't overflow the number of visible matches
fn clamp_selected(&mut self) {
let mut max = self.matches.len();
if max > 10 { max = 10; }
if self.selected >= max {
self.selected = if max > 0 { max - 1 } else { 0 };
}
}
// Resets the `terminal`
fn reset(&mut self) -> Result<(), Error> {
write!(self.terminal, "{}{}", cursor::Column(1), clear::Screen)?;
self.terminal.reset()?;
Ok(())
}
fn result(&mut self) -> &str {
self.matches.iter().
nth(self.selected).
map(|choice| choice.0).
unwrap_or(&self.search)
}
}
|
{
break;
}
|
conditional_block
|
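The score sort in filter_matches and filter_existing relies on partial_cmp(..).unwrap(), which panics if a score is ever NaN. If the scoring function is known never to produce NaN that is fine; otherwise f32::total_cmp (stable since Rust 1.62) gives the same descending order without the panic path. A sketch on bare scores:

fn main() {
    // Descending sort of f32 scores without the unwrap() panic path.
    let mut scores = vec![0.4_f32, 2.0, 1.5];
    scores.sort_by(|a, b| b.total_cmp(a));
    assert_eq!(scores, vec![2.0, 1.5, 0.4]);
}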
sample_index.rs
|
use std::{
cmp::Ordering,
error::Error,
fmt,
ops::{Add, AddAssign, Sub},
};
use metadata::Duration;
use super::{SampleIndexRange, Timestamp};
#[derive(Debug)]
pub struct DecrementError;
impl fmt::Display for DecrementError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str("overflow while decrementing")
}
}
impl Error for DecrementError {}
#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct SampleIndex(usize);
impl SampleIndex {
pub fn new(value: usize) -> Self {
SampleIndex(value)
}
#[track_caller]
pub fn from_ts(ts: Timestamp, sample_duration: Duration) -> Self {
SampleIndex((ts.as_u64() / sample_duration.as_u64()) as usize)
}
#[track_caller]
pub fn snap_to(self, sample_step: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 / sample_step.as_usize() * sample_step.as_usize())
}
#[track_caller]
pub fn as_ts(self, sample_duration: Duration) -> Timestamp {
Timestamp::new(self.0 as u64 * sample_duration.as_u64())
}
pub fn as_usize(self) -> usize {
self.0
}
pub fn as_u64(self) -> u64 {
self.0 as u64
}
pub fn try_dec(&mut self) -> Result<(), DecrementError> {
if self.0 > 0 {
*self = SampleIndex(self.0 - 1);
Ok(())
} else {
Err(DecrementError)
}
}
pub fn inc(&mut self) {
*self = SampleIndex(self.0 + 1);
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn checked_sub(self, rhs: Self) -> Option<SampleIndexRange> {
self.0.checked_sub(rhs.0).map(SampleIndexRange::new)
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub(self, rhs: Self) -> SampleIndexRange {
SampleIndexRange::new(self.0.saturating_sub(rhs.0))
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub_range(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex::new(self.0.saturating_sub(rhs.as_usize()))
}
}
impl From<usize> for SampleIndex {
fn from(value: usize) -> Self {
Self(value)
}
}
impl From<u64> for SampleIndex {
fn from(value: u64) -> Self {
Self(value as usize)
}
}
impl From<SampleIndexRange> for SampleIndex {
fn from(range: SampleIndexRange) -> Self {
Self(range.as_usize())
}
}
impl Sub for SampleIndex {
type Output = SampleIndexRange;
#[track_caller]
fn sub(self, rhs: SampleIndex) -> SampleIndexRange {
SampleIndexRange::new(self.0 - rhs.0)
}
}
impl Add<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn
|
(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 + rhs.as_usize())
}
}
impl AddAssign<SampleIndexRange> for SampleIndex {
#[track_caller]
fn add_assign(&mut self, rhs: SampleIndexRange) {
*self = SampleIndex(self.0 + rhs.as_usize());
}
}
impl Sub<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn sub(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 - rhs.as_usize())
}
}
impl PartialOrd<SampleIndexRange> for SampleIndex {
fn partial_cmp(&self, rhs: &SampleIndexRange) -> Option<Ordering> {
Some(self.0.cmp(&rhs.as_usize()))
}
}
impl PartialEq<SampleIndexRange> for SampleIndex {
fn eq(&self, rhs: &SampleIndexRange) -> bool {
self.0 == rhs.as_usize()
}
}
impl fmt::Display for SampleIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "idx {}", self.0)
}
}
|
add
|
identifier_name
|
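#[track_caller] appears on most of the arithmetic in SampleIndex because those operations can panic (overflow in debug builds, subtraction underflow); the attribute makes the reported panic location point at the call site rather than inside the method. A minimal demonstration of the attribute on its own:

#[track_caller]
fn decrement(value: usize) -> usize {
    // On failure, the reported panic location is the caller's line,
    // not this one, because of #[track_caller].
    assert!(value > 0, "cannot decrement zero");
    value - 1
}

fn main() {
    assert_eq!(decrement(3), 2);
}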
sample_index.rs
|
use std::{
cmp::Ordering,
error::Error,
fmt,
ops::{Add, AddAssign, Sub},
};
use metadata::Duration;
use super::{SampleIndexRange, Timestamp};
#[derive(Debug)]
pub struct DecrementError;
impl fmt::Display for DecrementError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str("overflow while decrementing")
}
}
impl Error for DecrementError {}
#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct SampleIndex(usize);
impl SampleIndex {
pub fn new(value: usize) -> Self {
SampleIndex(value)
}
#[track_caller]
pub fn from_ts(ts: Timestamp, sample_duration: Duration) -> Self {
SampleIndex((ts.as_u64() / sample_duration.as_u64()) as usize)
}
#[track_caller]
pub fn snap_to(self, sample_step: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 / sample_step.as_usize() * sample_step.as_usize())
}
#[track_caller]
pub fn as_ts(self, sample_duration: Duration) -> Timestamp {
Timestamp::new(self.0 as u64 * sample_duration.as_u64())
}
pub fn as_usize(self) -> usize {
self.0
}
pub fn as_u64(self) -> u64 {
self.0 as u64
}
pub fn try_dec(&mut self) -> Result<(), DecrementError> {
if self.0 > 0 {
*self = SampleIndex(self.0 - 1);
Ok(())
} else {
Err(DecrementError)
}
}
pub fn inc(&mut self) {
*self = SampleIndex(self.0 + 1);
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn checked_sub(self, rhs: Self) -> Option<SampleIndexRange> {
self.0.checked_sub(rhs.0).map(SampleIndexRange::new)
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub(self, rhs: Self) -> SampleIndexRange {
SampleIndexRange::new(self.0.saturating_sub(rhs.0))
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub_range(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex::new(self.0.saturating_sub(rhs.as_usize()))
}
}
impl From<usize> for SampleIndex {
fn from(value: usize) -> Self {
Self(value)
}
}
impl From<u64> for SampleIndex {
fn from(value: u64) -> Self {
Self(value as usize)
}
}
impl From<SampleIndexRange> for SampleIndex {
fn from(range: SampleIndexRange) -> Self {
Self(range.as_usize())
}
}
impl Sub for SampleIndex {
type Output = SampleIndexRange;
#[track_caller]
fn sub(self, rhs: SampleIndex) -> SampleIndexRange {
SampleIndexRange::new(self.0 - rhs.0)
}
}
impl Add<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn add(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 + rhs.as_usize())
}
}
|
#[track_caller]
fn add_assign(&mut self, rhs: SampleIndexRange) {
*self = SampleIndex(self.0 + rhs.as_usize());
}
}
impl Sub<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn sub(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 - rhs.as_usize())
}
}
impl PartialOrd<SampleIndexRange> for SampleIndex {
fn partial_cmp(&self, rhs: &SampleIndexRange) -> Option<Ordering> {
Some(self.0.cmp(&rhs.as_usize()))
}
}
impl PartialEq<SampleIndexRange> for SampleIndex {
fn eq(&self, rhs: &SampleIndexRange) -> bool {
self.0 == rhs.as_usize()
}
}
impl fmt::Display for SampleIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "idx {}", self.0)
}
}
|
impl AddAssign<SampleIndexRange> for SampleIndex {
|
random_line_split
|
sample_index.rs
|
use std::{
cmp::Ordering,
error::Error,
fmt,
ops::{Add, AddAssign, Sub},
};
use metadata::Duration;
use super::{SampleIndexRange, Timestamp};
#[derive(Debug)]
pub struct DecrementError;
impl fmt::Display for DecrementError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str("overflow while decrementing")
}
}
impl Error for DecrementError {}
#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct SampleIndex(usize);
impl SampleIndex {
pub fn new(value: usize) -> Self {
SampleIndex(value)
}
#[track_caller]
pub fn from_ts(ts: Timestamp, sample_duration: Duration) -> Self {
SampleIndex((ts.as_u64() / sample_duration.as_u64()) as usize)
}
#[track_caller]
pub fn snap_to(self, sample_step: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 / sample_step.as_usize() * sample_step.as_usize())
}
#[track_caller]
pub fn as_ts(self, sample_duration: Duration) -> Timestamp {
Timestamp::new(self.0 as u64 * sample_duration.as_u64())
}
pub fn as_usize(self) -> usize {
self.0
}
pub fn as_u64(self) -> u64 {
self.0 as u64
}
pub fn try_dec(&mut self) -> Result<(), DecrementError> {
if self.0 > 0 {
*self = SampleIndex(self.0 - 1);
Ok(())
} else {
Err(DecrementError)
}
}
pub fn inc(&mut self) {
*self = SampleIndex(self.0 + 1);
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn checked_sub(self, rhs: Self) -> Option<SampleIndexRange> {
self.0.checked_sub(rhs.0).map(SampleIndexRange::new)
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub(self, rhs: Self) -> SampleIndexRange {
SampleIndexRange::new(self.0.saturating_sub(rhs.0))
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub_range(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex::new(self.0.saturating_sub(rhs.as_usize()))
}
}
impl From<usize> for SampleIndex {
fn from(value: usize) -> Self {
Self(value)
}
}
impl From<u64> for SampleIndex {
fn from(value: u64) -> Self {
Self(value as usize)
}
}
impl From<SampleIndexRange> for SampleIndex {
fn from(range: SampleIndexRange) -> Self
|
}
impl Sub for SampleIndex {
type Output = SampleIndexRange;
#[track_caller]
fn sub(self, rhs: SampleIndex) -> SampleIndexRange {
SampleIndexRange::new(self.0 - rhs.0)
}
}
impl Add<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn add(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 + rhs.as_usize())
}
}
impl AddAssign<SampleIndexRange> for SampleIndex {
#[track_caller]
fn add_assign(&mut self, rhs: SampleIndexRange) {
*self = SampleIndex(self.0 + rhs.as_usize());
}
}
impl Sub<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn sub(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 - rhs.as_usize())
}
}
impl PartialOrd<SampleIndexRange> for SampleIndex {
fn partial_cmp(&self, rhs: &SampleIndexRange) -> Option<Ordering> {
Some(self.0.cmp(&rhs.as_usize()))
}
}
impl PartialEq<SampleIndexRange> for SampleIndex {
fn eq(&self, rhs: &SampleIndexRange) -> bool {
self.0 == rhs.as_usize()
}
}
impl fmt::Display for SampleIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "idx {}", self.0)
}
}
|
{
Self(range.as_usize())
}
|
identifier_body
|
sample_index.rs
|
use std::{
cmp::Ordering,
error::Error,
fmt,
ops::{Add, AddAssign, Sub},
};
use metadata::Duration;
use super::{SampleIndexRange, Timestamp};
#[derive(Debug)]
pub struct DecrementError;
impl fmt::Display for DecrementError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str("overflow while decrementing")
}
}
impl Error for DecrementError {}
#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct SampleIndex(usize);
impl SampleIndex {
pub fn new(value: usize) -> Self {
SampleIndex(value)
}
#[track_caller]
pub fn from_ts(ts: Timestamp, sample_duration: Duration) -> Self {
SampleIndex((ts.as_u64() / sample_duration.as_u64()) as usize)
}
#[track_caller]
pub fn snap_to(self, sample_step: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 / sample_step.as_usize() * sample_step.as_usize())
}
#[track_caller]
pub fn as_ts(self, sample_duration: Duration) -> Timestamp {
Timestamp::new(self.0 as u64 * sample_duration.as_u64())
}
pub fn as_usize(self) -> usize {
self.0
}
pub fn as_u64(self) -> u64 {
self.0 as u64
}
pub fn try_dec(&mut self) -> Result<(), DecrementError> {
if self.0 > 0 {
*self = SampleIndex(self.0 - 1);
Ok(())
} else
|
}
pub fn inc(&mut self) {
*self = SampleIndex(self.0 + 1);
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn checked_sub(self, rhs: Self) -> Option<SampleIndexRange> {
self.0.checked_sub(rhs.0).map(SampleIndexRange::new)
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub(self, rhs: Self) -> SampleIndexRange {
SampleIndexRange::new(self.0.saturating_sub(rhs.0))
}
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn saturating_sub_range(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex::new(self.0.saturating_sub(rhs.as_usize()))
}
}
impl From<usize> for SampleIndex {
fn from(value: usize) -> Self {
Self(value)
}
}
impl From<u64> for SampleIndex {
fn from(value: u64) -> Self {
Self(value as usize)
}
}
impl From<SampleIndexRange> for SampleIndex {
fn from(range: SampleIndexRange) -> Self {
Self(range.as_usize())
}
}
impl Sub for SampleIndex {
type Output = SampleIndexRange;
#[track_caller]
fn sub(self, rhs: SampleIndex) -> SampleIndexRange {
SampleIndexRange::new(self.0 - rhs.0)
}
}
impl Add<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn add(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 + rhs.as_usize())
}
}
impl AddAssign<SampleIndexRange> for SampleIndex {
#[track_caller]
fn add_assign(&mut self, rhs: SampleIndexRange) {
*self = SampleIndex(self.0 + rhs.as_usize());
}
}
impl Sub<SampleIndexRange> for SampleIndex {
type Output = SampleIndex;
#[track_caller]
fn sub(self, rhs: SampleIndexRange) -> SampleIndex {
SampleIndex(self.0 - rhs.as_usize())
}
}
impl PartialOrd<SampleIndexRange> for SampleIndex {
fn partial_cmp(&self, rhs: &SampleIndexRange) -> Option<Ordering> {
Some(self.0.cmp(&rhs.as_usize()))
}
}
impl PartialEq<SampleIndexRange> for SampleIndex {
fn eq(&self, rhs: &SampleIndexRange) -> bool {
self.0 == rhs.as_usize()
}
}
impl fmt::Display for SampleIndex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "idx {}", self.0)
}
}
|
{
Err(DecrementError)
}
|
conditional_block
|
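The broader design in sample_index.rs is an index/range newtype split: SampleIndex is an absolute position, SampleIndexRange a distance, and the operator impls only permit dimensionally sensible combinations (position + distance = position, position - position = distance). A distilled, self-contained sketch of that pattern using hypothetical Idx/Span types:

use std::ops::{Add, Sub};

#[derive(Clone, Copy, Debug, PartialEq)]
struct Idx(usize); // absolute position
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(usize); // distance between positions

impl Add<Span> for Idx {
    type Output = Idx;
    fn add(self, rhs: Span) -> Idx {
        Idx(self.0 + rhs.0)
    }
}

impl Sub for Idx {
    type Output = Span;
    fn sub(self, rhs: Idx) -> Span {
        Span(self.0 - rhs.0)
    }
}

fn main() {
    assert_eq!(Idx(5) + Span(3), Idx(8));
    assert_eq!(Idx(8) - Idx(5), Span(3));
    // Idx(5) + Idx(3) would not compile: adding two positions is meaningless.
}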
lib.rs
|
// ams - Advanced Memory Scanner
// Copyright (C) 2018 th0rex
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! Lock-free Queue implementation.
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct Queue<T> {
size: usize,
count: AtomicUsize,
write_index: AtomicUsize,
read_index: AtomicUsize,
max_read_index: AtomicUsize,
data: UnsafeCell<Box<[T]>>,
}
impl<T: Clone + Default> Queue<T> {
pub fn new(size: usize) -> Queue<T> {
Queue {
size,
count: AtomicUsize::new(0),
write_index: AtomicUsize::new(0),
read_index: AtomicUsize::new(0),
max_read_index: AtomicUsize::new(0),
data: UnsafeCell::new(vec![T::default(); size].into_boxed_slice()),
}
}
pub fn pop(&self) -> Option<T> {
let mut ret;
let mut current_read_index;
let mut current_max_read_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_max_read_index = self.max_read_index.load(Ordering::SeqCst);
if self.count_to_index(current_read_index)
== self.count_to_index(current_max_read_index)
{
return None;
}
ret = unsafe { &*self.data.get() }[self.count_to_index(current_read_index)].clone();
if self.read_index.compare_and_swap(
current_read_index,
current_read_index + 1,
Ordering::SeqCst,
) == current_read_index
{
self.count.fetch_sub(1, Ordering::SeqCst);
return Some(ret);
}
true
} {}
unreachable!();
}
pub fn push(&self, val: T) -> bool {
let mut current_read_index;
let mut current_write_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_write_index = self.write_index.load(Ordering::SeqCst);
if self.count_to_index(current_write_index + 1)
== self.count_to_index(current_read_index)
|
self.write_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
} {}
unsafe {
let slice = &mut **self.data.get();
slice[self.count_to_index(current_write_index)] = val;
}
while self.max_read_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
{}
self.count.fetch_add(1, Ordering::SeqCst);
true
}
pub fn size(&self) -> usize {
self.size
}
fn count_to_index(&self, to: usize) -> usize {
to % self.size
}
}
unsafe impl<T> Sync for Queue<T> {}
|
{
return false;
}
|
conditional_block
|
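pop and push use a construction that is easy to misread: while { body } {} is Rust's closest analogue to a do-while loop. The block runs first, and its trailing bool expression decides whether to iterate again. Isolated:

fn main() {
    let mut attempts = 0;
    // The body executes at least once; its final expression is the condition.
    while {
        attempts += 1;
        attempts < 3
    } {}
    assert_eq!(attempts, 3);
}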
lib.rs
|
// ams - Advanced Memory Scanner
// Copyright (C) 2018 th0rex
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! Lock-free Queue implementation.
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct Queue<T> {
size: usize,
count: AtomicUsize,
write_index: AtomicUsize,
|
impl<T: Clone + Default> Queue<T> {
pub fn new(size: usize) -> Queue<T> {
Queue {
size,
count: AtomicUsize::new(0),
write_index: AtomicUsize::new(0),
read_index: AtomicUsize::new(0),
max_read_index: AtomicUsize::new(0),
data: UnsafeCell::new(vec![T::default(); size].into_boxed_slice()),
}
}
pub fn pop(&self) -> Option<T> {
let mut ret;
let mut current_read_index;
let mut current_max_read_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_max_read_index = self.max_read_index.load(Ordering::SeqCst);
if self.count_to_index(current_read_index)
== self.count_to_index(current_max_read_index)
{
return None;
}
ret = unsafe { &*self.data.get() }[self.count_to_index(current_read_index)].clone();
if self.read_index.compare_and_swap(
current_read_index,
current_read_index + 1,
Ordering::SeqCst,
) == current_read_index
{
self.count.fetch_sub(1, Ordering::SeqCst);
return Some(ret);
}
true
} {}
unreachable!();
}
pub fn push(&self, val: T) -> bool {
let mut current_read_index;
let mut current_write_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_write_index = self.write_index.load(Ordering::SeqCst);
if self.count_to_index(current_write_index + 1)
== self.count_to_index(current_read_index)
{
return false;
}
self.write_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
} {}
unsafe {
let slice = &mut **self.data.get();
slice[self.count_to_index(current_write_index)] = val;
}
while self.max_read_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
{}
self.count.fetch_add(1, Ordering::SeqCst);
true
}
pub fn size(&self) -> usize {
self.size
}
fn count_to_index(&self, to: usize) -> usize {
to % self.size
}
}
unsafe impl<T> Sync for Queue<T> {}
|
read_index: AtomicUsize,
max_read_index: AtomicUsize,
data: UnsafeCell<Box<[T]>>,
}
|
random_line_split
|
lib.rs
|
// ams - Advanced Memory Scanner
// Copyright (C) 2018 th0rex
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! Lock-free Queue implementation.
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct
|
<T> {
size: usize,
count: AtomicUsize,
write_index: AtomicUsize,
read_index: AtomicUsize,
max_read_index: AtomicUsize,
data: UnsafeCell<Box<[T]>>,
}
impl<T: Clone + Default> Queue<T> {
pub fn new(size: usize) -> Queue<T> {
Queue {
size,
count: AtomicUsize::new(0),
write_index: AtomicUsize::new(0),
read_index: AtomicUsize::new(0),
max_read_index: AtomicUsize::new(0),
data: UnsafeCell::new(vec![T::default(); size].into_boxed_slice()),
}
}
pub fn pop(&self) -> Option<T> {
let mut ret;
let mut current_read_index;
let mut current_max_read_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_max_read_index = self.max_read_index.load(Ordering::SeqCst);
if self.count_to_index(current_read_index)
== self.count_to_index(current_max_read_index)
{
return None;
}
ret = unsafe { &*self.data.get() }[self.count_to_index(current_read_index)].clone();
if self.read_index.compare_and_swap(
current_read_index,
current_read_index + 1,
Ordering::SeqCst,
) == current_read_index
{
self.count.fetch_sub(1, Ordering::SeqCst);
return Some(ret);
}
true
} {}
unreachable!();
}
pub fn push(&self, val: T) -> bool {
let mut current_read_index;
let mut current_write_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_write_index = self.write_index.load(Ordering::SeqCst);
if self.count_to_index(current_write_index + 1)
== self.count_to_index(current_read_index)
{
return false;
}
self.write_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
} {}
unsafe {
let slice = &mut **self.data.get();
slice[self.count_to_index(current_write_index)] = val;
}
while self.max_read_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
{}
self.count.fetch_add(1, Ordering::SeqCst);
true
}
pub fn size(&self) -> usize {
self.size
}
fn count_to_index(&self, to: usize) -> usize {
to % self.size
}
}
unsafe impl<T> Sync for Queue<T> {}
|
Queue
|
identifier_name
|
lib.rs
|
// ams - Advanced Memory Scanner
// Copyright (C) 2018 th0rex
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//! Lock-free Queue implementation.
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct Queue<T> {
size: usize,
count: AtomicUsize,
write_index: AtomicUsize,
read_index: AtomicUsize,
max_read_index: AtomicUsize,
data: UnsafeCell<Box<[T]>>,
}
impl<T: Clone + Default> Queue<T> {
pub fn new(size: usize) -> Queue<T> {
Queue {
size,
count: AtomicUsize::new(0),
write_index: AtomicUsize::new(0),
read_index: AtomicUsize::new(0),
max_read_index: AtomicUsize::new(0),
data: UnsafeCell::new(vec![T::default(); size].into_boxed_slice()),
}
}
pub fn pop(&self) -> Option<T> {
let mut ret;
let mut current_read_index;
let mut current_max_read_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_max_read_index = self.max_read_index.load(Ordering::SeqCst);
if self.count_to_index(current_read_index)
== self.count_to_index(current_max_read_index)
{
return None;
}
ret = unsafe { &*self.data.get() }[self.count_to_index(current_read_index)].clone();
if self.read_index.compare_and_swap(
current_read_index,
current_read_index + 1,
Ordering::SeqCst,
) == current_read_index
{
self.count.fetch_sub(1, Ordering::SeqCst);
return Some(ret);
}
true
} {}
unreachable!();
}
pub fn push(&self, val: T) -> bool {
let mut current_read_index;
let mut current_write_index;
while {
current_read_index = self.read_index.load(Ordering::SeqCst);
current_write_index = self.write_index.load(Ordering::SeqCst);
if self.count_to_index(current_write_index + 1)
== self.count_to_index(current_read_index)
{
return false;
}
self.write_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
} {}
unsafe {
let slice = &mut **self.data.get();
slice[self.count_to_index(current_write_index)] = val;
}
while self.max_read_index.compare_and_swap(
current_write_index,
current_write_index + 1,
Ordering::SeqCst,
) != current_write_index
{}
self.count.fetch_add(1, Ordering::SeqCst);
true
}
pub fn size(&self) -> usize {
self.size
}
fn count_to_index(&self, to: usize) -> usize
|
}
unsafe impl<T> Sync for Queue<T> {}
|
{
to % self.size
}
|
identifier_body
|
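AtomicUsize::compare_and_swap, used throughout the queue, has been deprecated since Rust 1.50 in favor of compare_exchange, which takes separate success/failure orderings and reports the outcome through a Result rather than returning the previous value. The CAS-succeeded checks above (== current_read_index, != current_write_index) translate to is_ok()/is_err(). A sketch of the updated form:

use std::sync::atomic::{AtomicUsize, Ordering};

// Equivalent of `index.compare_and_swap(cur, cur + 1, SeqCst) == cur`.
fn try_advance(index: &AtomicUsize, current: usize) -> bool {
    index
        .compare_exchange(current, current + 1, Ordering::SeqCst, Ordering::SeqCst)
        .is_ok()
}

fn main() {
    let idx = AtomicUsize::new(0);
    assert!(try_advance(&idx, 0));
    assert!(!try_advance(&idx, 0)); // stale snapshot: the CAS fails
    assert_eq!(idx.load(Ordering::SeqCst), 1);
}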
config.rs
|
extern crate toml;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
#[derive(Deserialize)]
pub struct
|
{
pub inoreader_appkey: String,
pub inoreader_appid: String,
pub inoreader_token: String,
pub inoreader_endpoint: String,
telegram_token: String,
telegram_endpoint: String,
pub db_path: String,
pub timeout: u64,
}
impl Config {
pub fn new(path: &str) -> Config {
let file_name = Path::new(path);
let mut file = File::open(&file_name).unwrap();
let mut content = String::new();
// read the file contents; panic on error
file.read_to_string(&mut content).unwrap();
// map the settings from the file into the struct
toml::from_str(&content.to_string()).unwrap()
}
pub fn get_telegram_bot_endpoint(&self) -> String {
let mut main_ep = self.telegram_endpoint.to_owned();
let bot_ep = format!("/bot{}", &self.telegram_token);
main_ep.push_str(&bot_ep);
main_ep
}
}
|
Config
|
identifier_name
|
config.rs
|
extern crate toml;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
#[derive(Deserialize)]
pub struct Config {
pub inoreader_appkey: String,
|
pub db_path: String,
pub timeout: u64,
}
impl Config {
pub fn new(path: &str) -> Config {
let file_name = Path::new(path);
let mut file = File::open(&file_name).unwrap();
let mut content = String::new();
// read the file contents; panic on error
file.read_to_string(&mut content).unwrap();
// map the settings from the file into the struct
toml::from_str(&content.to_string()).unwrap()
}
pub fn get_telegram_bot_endpoint(&self) -> String {
let mut main_ep = self.telegram_endpoint.to_owned();
let bot_ep = format!("/bot{}", &self.telegram_token);
main_ep.push_str(&bot_ep);
main_ep
}
}
|
pub inoreader_appid: String,
pub inoreader_token: String,
pub inoreader_endpoint: String,
telegram_token: String,
telegram_endpoint: String,
|
random_line_split
|
config.rs
|
extern crate toml;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
#[derive(Deserialize)]
pub struct Config {
pub inoreader_appkey: String,
pub inoreader_appid: String,
pub inoreader_token: String,
pub inoreader_endpoint: String,
telegram_token: String,
telegram_endpoint: String,
pub db_path: String,
pub timeout: u64,
}
impl Config {
pub fn new(path: &str) -> Config
|
= self.telegram_endpoint.to_owned();
let bot_ep = format!("/bot{}", &self.telegram_token);
main_ep.push_str(&bot_ep);
main_ep
}
}
|
{
let file_name = Path::new(path);
let mut file = File::open(&file_name).unwrap();
let mut content = String::new();
// read the file contents; panic on error
file.read_to_string(&mut content).unwrap();
// map the settings from the file into the struct
toml::from_str(&content.to_string()).unwrap()
}
pub fn get_telegram_bot_endpoint(&self) -> String {
let mut main_ep
|
identifier_body
|
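The Config::new pattern (read a file, then toml::from_str into a #[derive(Deserialize)] struct) can be exercised without touching the filesystem by parsing an inline string. A minimal sketch with a trimmed-down stand-in struct, assuming the toml crate plus serde with its derive feature:

use serde::Deserialize;

#[derive(Deserialize)]
struct Settings {
    db_path: String,
    timeout: u64,
}

fn main() {
    // Inline TOML stands in for the contents of the config file.
    let content = "db_path = \"/tmp/bot.db\"\ntimeout = 30";
    let settings: Settings = toml::from_str(content).unwrap();
    assert_eq!(settings.db_path, "/tmp/bot.db");
    assert_eq!(settings.timeout, 30);
}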
tensor.rs
|
use std::ffi::CString;
use std::fmt;
use std::ptr::{self, NonNull};
use dynet_sys;
use super::{ApiResult, Dim, Result, Wrap};
/// A struct to represent a tensor.
///
/// # Examples
///
/// ```
/// # use dynet::{DynetParams, ParameterCollection, ParameterInitGlorot};
/// dynet::initialize(&mut DynetParams::from_args(false));
///
/// let mut m = ParameterCollection::new();
///
/// let initializer = ParameterInitGlorot::default();
/// let mut p_W = m.add_parameters([8, 2], &initializer);
/// let t_W = p_W.values();
/// println!("parameter W: dim={}, values=\n[\n{}\n]", t_W.dim(), t_W);
/// let v_W = t_W.as_vector();
/// ```
#[derive(Debug)]
pub struct Tensor {
inner: NonNull<dynet_sys::dynetTensor_t>,
owned: bool,
}
impl_wrap!(Tensor, dynetTensor_t);
impl_drop!(Tensor, dynetDeleteTensor);
impl Tensor {
/// Returns the dim of the tensor.
pub fn dim(&self) -> Dim {
unsafe {
let mut dim_ptr: *mut dynet_sys::dynetDim_t = ptr::null_mut();
check_api_status!(dynet_sys::dynetGetTensorDim(self.as_ptr(), &mut dim_ptr));
Dim::from_raw(dim_ptr, true)
}
}
/// Retrieves one internal value in the tensor.
///
/// # Panics
///
/// Panics if the tensor has more than one element.
pub fn as_scalar(&self) -> f32 {
unsafe {
let mut retval: f32 = 0.0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsScalar(
self.as_ptr(),
&mut retval,
));
retval
}
}
/// Retrieves internal values in the tensor as a vector.
///
/// For higher order tensors this returns the flattened value.
pub fn as_vector(&self) -> Vec<f32> {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let mut retval = vec![0f32; size];
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
retval.as_mut_ptr(),
&mut size,
));
retval
}
}
}
impl fmt::Display for Tensor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
|
}
|
{
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let buffer = CString::new(vec![b'0'; size]).unwrap().into_raw();
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
buffer,
&mut size,
));
f.write_str(CString::from_raw(buffer).to_str().unwrap())
}
}
|
identifier_body
|
tensor.rs
|
use std::ffi::CString;
use std::fmt;
use std::ptr::{self, NonNull};
use dynet_sys;
use super::{ApiResult, Dim, Result, Wrap};
/// A struct to represent a tensor.
///
/// # Examples
///
/// ```
/// # use dynet::{DynetParams, ParameterCollection, ParameterInitGlorot};
/// dynet::initialize(&mut DynetParams::from_args(false));
///
/// let mut m = ParameterCollection::new();
///
/// let initializer = ParameterInitGlorot::default();
/// let mut p_W = m.add_parameters([8, 2], &initializer);
/// let t_W = p_W.values();
/// println!("parameter W: dim={}, values=\n[\n{}\n]", t_W.dim(), t_W);
/// let v_W = t_W.as_vector();
/// ```
#[derive(Debug)]
pub struct Tensor {
inner: NonNull<dynet_sys::dynetTensor_t>,
owned: bool,
}
impl_wrap!(Tensor, dynetTensor_t);
impl_drop!(Tensor, dynetDeleteTensor);
impl Tensor {
/// Returns the dim of the tensor.
pub fn
|
(&self) -> Dim {
unsafe {
let mut dim_ptr: *mut dynet_sys::dynetDim_t = ptr::null_mut();
check_api_status!(dynet_sys::dynetGetTensorDim(self.as_ptr(), &mut dim_ptr));
Dim::from_raw(dim_ptr, true)
}
}
/// Retrieves one internal value in the tensor.
///
/// # Panics
///
/// Panics if the tensor has more than one element.
pub fn as_scalar(&self) -> f32 {
unsafe {
let mut retval: f32 = 0.0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsScalar(
self.as_ptr(),
&mut retval,
));
retval
}
}
/// Retrieves internal values in the tensor as a vector.
///
/// For higher order tensors this returns the flattened value.
pub fn as_vector(&self) -> Vec<f32> {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let mut retval = vec![0f32; size];
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
retval.as_mut_ptr(),
&mut size,
));
retval
}
}
}
impl fmt::Display for Tensor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let buffer = CString::new(vec![b'0'; size]).unwrap().into_raw();
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
buffer,
&mut size,
));
f.write_str(CString::from_raw(buffer).to_str().unwrap())
}
}
}
|
dim
|
identifier_name
|
tensor.rs
|
use std::ffi::CString;
use std::fmt;
use std::ptr::{self, NonNull};
use dynet_sys;
use super::{ApiResult, Dim, Result, Wrap};
/// A struct to represent a tensor.
///
/// # Examples
///
/// ```
/// # use dynet::{DynetParams, ParameterCollection, ParameterInitGlorot};
/// dynet::initialize(&mut DynetParams::from_args(false));
///
/// let mut m = ParameterCollection::new();
///
/// let initializer = ParameterInitGlorot::default();
/// let mut p_W = m.add_parameters([8, 2], &initializer);
/// let t_W = p_W.values();
/// println!("parameter W: dim={}, values=\n[\n{}\n]", t_W.dim(), t_W);
/// let v_W = t_W.as_vector();
/// ```
#[derive(Debug)]
pub struct Tensor {
inner: NonNull<dynet_sys::dynetTensor_t>,
owned: bool,
}
impl_wrap!(Tensor, dynetTensor_t);
impl_drop!(Tensor, dynetDeleteTensor);
impl Tensor {
/// Returns the dim of the tensor.
pub fn dim(&self) -> Dim {
unsafe {
let mut dim_ptr: *mut dynet_sys::dynetDim_t = ptr::null_mut();
check_api_status!(dynet_sys::dynetGetTensorDim(self.as_ptr(), &mut dim_ptr));
Dim::from_raw(dim_ptr, true)
}
}
/// Retrieves one internal value in the tensor.
///
/// # Panics
///
/// Panics if the tensor has more than one element.
pub fn as_scalar(&self) -> f32 {
unsafe {
let mut retval: f32 = 0.0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsScalar(
self.as_ptr(),
&mut retval,
));
retval
}
}
/// Retrieves internal values in the tensor as a vector.
///
/// For higher order tensors this returns the flattened value.
pub fn as_vector(&self) -> Vec<f32> {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let mut retval = vec![0f32; size];
check_api_status!(dynet_sys::dynetEvaluateTensorAsArray(
|
}
}
}
impl fmt::Display for Tensor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
unsafe {
let mut size: usize = 0;
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
ptr::null_mut(),
&mut size,
));
let buffer = CString::new(vec![b'0'; size]).unwrap().into_raw();
check_api_status!(dynet_sys::dynetRepresentTensorAsString(
self.as_ptr(),
buffer,
&mut size,
));
f.write_str(CString::from_raw(buffer).to_str().unwrap())
}
}
}
|
self.as_ptr(),
retval.as_mut_ptr(),
&mut size,
));
retval
|
random_line_split
|
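Both as_vector and the Display impl use the same two-pass C-API idiom: call once with a null output pointer so the library reports the required size, allocate, then call again to fill the buffer. Distilled into safe Rust with a closure standing in for the FFI call (the closure is hypothetical, not part of dynet's API):

// First pass (None) reports the needed length; second pass fills the buffer.
fn read_all(mut query_or_fill: impl FnMut(Option<&mut [f32]>) -> usize) -> Vec<f32> {
    let size = query_or_fill(None);
    let mut out = vec![0f32; size];
    query_or_fill(Some(&mut out));
    out
}

fn main() {
    let source = [1.0f32, 2.0, 3.0];
    let v = read_all(|buf| {
        if let Some(buf) = buf {
            buf.copy_from_slice(&source);
        }
        source.len()
    });
    assert_eq!(v, vec![1.0, 2.0, 3.0]);
}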
mod.rs
|
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
mod bloom;
mod block;
mod complete;
mod fork;
pub mod generator;
pub use self::complete::BlockFinalizer;
pub use self::generator::{ChainGenerator, ChainIterator};
|
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
random_line_split
|
send-is-not-static-par-for.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::thread;
use std::sync::Mutex;
fn par_for<I, F>(iter: I, f: F)
where I: Iterator,
I::Item: Send,
F: Fn(I::Item) + Sync
|
fn sum(x: &[i32]) {
let sum_lengths = Mutex::new(0);
par_for(x.windows(4), |x| {
*sum_lengths.lock().unwrap() += x.len()
});
assert_eq!(*sum_lengths.lock().unwrap(), (x.len() - 3) * 4);
}
fn main() {
let mut elements = [0; 20];
// iterators over references into this stack frame
par_for(elements.iter_mut().enumerate(), |(i, x)| {
*x = i as i32
});
sum(&elements)
}
|
{
for item in iter {
f(item)
}
}
|
identifier_body
|
send-is-not-static-par-for.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::thread;
use std::sync::Mutex;
fn par_for<I, F>(iter: I, f: F)
where I: Iterator,
I::Item: Send,
F: Fn(I::Item) + Sync
{
for item in iter {
f(item)
|
fn sum(x: &[i32]) {
let sum_lengths = Mutex::new(0);
par_for(x.windows(4), |x| {
*sum_lengths.lock().unwrap() += x.len()
});
assert_eq!(*sum_lengths.lock().unwrap(), (x.len() - 3) * 4);
}
fn main() {
let mut elements = [0; 20];
// iterators over references into this stack frame
par_for(elements.iter_mut().enumerate(), |(i, x)| {
*x = i as i32
});
sum(&elements)
}
|
}
}
|
random_line_split
|
send-is-not-static-par-for.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::thread;
use std::sync::Mutex;
fn par_for<I, F>(iter: I, f: F)
where I: Iterator,
I::Item: Send,
F: Fn(I::Item) + Sync
{
for item in iter {
f(item)
}
}
fn
|
(x: &[i32]) {
let sum_lengths = Mutex::new(0);
par_for(x.windows(4), |x| {
*sum_lengths.lock().unwrap() += x.len()
});
assert_eq!(*sum_lengths.lock().unwrap(), (x.len() - 3) * 4);
}
fn main() {
let mut elements = [0; 20];
// iterators over references into this stack frame
par_for(elements.iter_mut().enumerate(), |(i, x)| {
*x = i as i32
});
sum(&elements)
}
|
sum
|
identifier_name
|
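As written, par_for runs every item sequentially on the calling thread; the Send and Sync bounds exist so this file (evidently a Rust-project test, per its header) can probe what would be required to parallelize over borrowed stack data. With scoped threads (stable since Rust 1.63) the same signature can genuinely fan out while still borrowing from the caller's frame; a sketch:

use std::thread;

fn par_for<I, F>(iter: I, f: F)
where
    I: Iterator,
    I::Item: Send,
    F: Fn(I::Item) + Sync,
{
    let f = &f; // share one &F across all spawned closures
    thread::scope(|s| {
        for item in iter {
            // Scoped threads may borrow from this stack frame; the scope
            // joins every spawned thread before returning.
            s.spawn(move || f(item));
        }
    });
}

fn main() {
    let mut elements = [0i32; 8];
    par_for(elements.iter_mut().enumerate(), |(i, x)| *x = i as i32);
    assert_eq!(elements[7], 7);
}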
x86.rs
|
use std::any::{Any, TypeId};
use std::mem;
use runtime::{Object, Imp, Sel};
use super::Super;
pub fn msg_send_fn<R: Any>(obj: *mut Object, _: Sel) -> (Imp, *mut Object) {
// Structures 1 or 2 bytes in size are placed in EAX.
// Structures 4 or 8 bytes in size are placed in EAX and EDX.
// Structures of other sizes are placed at the address supplied by the caller.
// https://developer.apple.com/library/mac/documentation/DeveloperTools/Conceptual/LowLevelABI/130-IA-32_Function_Calling_Conventions/IA32.html
extern {
fn objc_msgSend();
fn objc_msgSend_fpret();
fn objc_msgSend_stret();
}
let type_id = TypeId::of::<R>();
let size = mem::size_of::<R>();
let msg_fn = if type_id == TypeId::of::<f32>() ||
type_id == TypeId::of::<f64>() {
objc_msgSend_fpret
} else if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSend
} else {
objc_msgSend_stret
};
(msg_fn, obj)
}
pub fn msg_send_super_fn<R: Any>(sup: &Super, _: Sel) -> (Imp, *mut Object) {
extern {
fn objc_msgSendSuper();
fn objc_msgSendSuper_stret();
}
let size = mem::size_of::<R>();
let msg_fn = if size == 0 || size == 1 || size == 2 || size == 4 || size == 8
|
else {
objc_msgSendSuper_stret
};
(msg_fn, sup as *const Super as *mut Object)
}
|
{
objc_msgSendSuper
}
|
conditional_block
|
x86.rs
|
use std::any::{Any, TypeId};
use std::mem;
use runtime::{Object, Imp, Sel};
use super::Super;
pub fn msg_send_fn<R: Any>(obj: *mut Object, _: Sel) -> (Imp, *mut Object) {
// Structures 1 or 2 bytes in size are placed in EAX.
// Structures 4 or 8 bytes in size are placed in EAX and EDX.
// Structures of other sizes are placed at the address supplied by the caller.
// https://developer.apple.com/library/mac/documentation/DeveloperTools/Conceptual/LowLevelABI/130-IA-32_Function_Calling_Conventions/IA32.html
extern {
fn objc_msgSend();
fn objc_msgSend_fpret();
fn objc_msgSend_stret();
}
let type_id = TypeId::of::<R>();
let size = mem::size_of::<R>();
let msg_fn = if type_id == TypeId::of::<f32>() ||
type_id == TypeId::of::<f64>() {
objc_msgSend_fpret
} else if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSend
} else {
objc_msgSend_stret
};
(msg_fn, obj)
}
pub fn
|
<R: Any>(sup: &Super, _: Sel) -> (Imp, *mut Object) {
extern {
fn objc_msgSendSuper();
fn objc_msgSendSuper_stret();
}
let size = mem::size_of::<R>();
let msg_fn = if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSendSuper
} else {
objc_msgSendSuper_stret
};
(msg_fn, sup as *const Super as *mut Object)
}
|
msg_send_super_fn
|
identifier_name
|
x86.rs
|
use std::any::{Any, TypeId};
use std::mem;
use runtime::{Object, Imp, Sel};
use super::Super;
pub fn msg_send_fn<R: Any>(obj: *mut Object, _: Sel) -> (Imp, *mut Object) {
// Structures 1 or 2 bytes in size are placed in EAX.
// Structures 4 or 8 bytes in size are placed in EAX and EDX.
// Structures of other sizes are placed at the address supplied by the caller.
// https://developer.apple.com/library/mac/documentation/DeveloperTools/Conceptual/LowLevelABI/130-IA-32_Function_Calling_Conventions/IA32.html
extern {
fn objc_msgSend();
fn objc_msgSend_fpret();
fn objc_msgSend_stret();
}
let type_id = TypeId::of::<R>();
let size = mem::size_of::<R>();
let msg_fn = if type_id == TypeId::of::<f32>() ||
type_id == TypeId::of::<f64>() {
objc_msgSend_fpret
} else if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSend
} else {
objc_msgSend_stret
};
(msg_fn, obj)
}
pub fn msg_send_super_fn<R: Any>(sup: &Super, _: Sel) -> (Imp, *mut Object) {
extern {
fn objc_msgSendSuper();
fn objc_msgSendSuper_stret();
}
let size = mem::size_of::<R>();
let msg_fn = if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSendSuper
} else {
objc_msgSendSuper_stret
};
(msg_fn, sup as *const Super as *mut Object)
|
}
|
random_line_split
|
|
x86.rs
|
use std::any::{Any, TypeId};
use std::mem;
use runtime::{Object, Imp, Sel};
use super::Super;
pub fn msg_send_fn<R: Any>(obj: *mut Object, _: Sel) -> (Imp, *mut Object)
|
objc_msgSend_stret
};
(msg_fn, obj)
}
pub fn msg_send_super_fn<R: Any>(sup: &Super, _: Sel) -> (Imp, *mut Object) {
extern {
fn objc_msgSendSuper();
fn objc_msgSendSuper_stret();
}
let size = mem::size_of::<R>();
let msg_fn = if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSendSuper
} else {
objc_msgSendSuper_stret
};
(msg_fn, sup as *const Super as *mut Object)
}
|
{
// Structures 1 or 2 bytes in size are placed in EAX.
// Structures 4 or 8 bytes in size are placed in EAX and EDX.
// Structures of other sizes are placed at the address supplied by the caller.
// https://developer.apple.com/library/mac/documentation/DeveloperTools/Conceptual/LowLevelABI/130-IA-32_Function_Calling_Conventions/IA32.html
extern {
fn objc_msgSend();
fn objc_msgSend_fpret();
fn objc_msgSend_stret();
}
let type_id = TypeId::of::<R>();
let size = mem::size_of::<R>();
let msg_fn = if type_id == TypeId::of::<f32>() ||
type_id == TypeId::of::<f64>() {
objc_msgSend_fpret
} else if size == 0 || size == 1 || size == 2 || size == 4 || size == 8 {
objc_msgSend
} else {
|
identifier_body
|
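The dispatch in both functions reduces to one question: does the return type fit the i386 register-return convention (0, 1, 2, 4, or 8 bytes, returned in EAX or EAX:EDX), or must the caller pass a hidden struct-return pointer and use the _stret messenger? (The separate fpret path for f32/f64 is checked before this.) The size test, isolated into a standalone predicate:

use std::mem;

// true: the value comes back in registers; false: the caller supplies a
// hidden out-pointer and the _stret variant of the messenger is needed.
fn returns_in_registers<R>() -> bool {
    matches!(mem::size_of::<R>(), 0 | 1 | 2 | 4 | 8)
}

fn main() {
    assert!(returns_in_registers::<u32>());
    assert!(returns_in_registers::<(u32, u32)>()); // 8 bytes: EAX and EDX
    assert!(!returns_in_registers::<[u8; 12]>()); // needs objc_msgSend_stret
}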
cmp.rs
|
//! Comparison traits for `[T]`.
use crate::cmp;
use crate::cmp::Ordering::{self, Greater, Less};
use crate::mem;
use super::from_raw_parts;
use super::memchr;
extern "C" {
/// Calls the implementation-provided memcmp.
///
/// Interprets the data as u8.
///
/// Returns 0 for equal, < 0 for less than and > 0 for greater
/// than.
// FIXME(#32610): Return type should be c_int
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> PartialEq<[B]> for [A]
where
A: PartialEq<B>,
{
fn eq(&self, other: &[B]) -> bool {
SlicePartialEq::equal(self, other)
}
fn ne(&self, other: &[B]) -> bool {
SlicePartialEq::not_equal(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
SliceOrd::compare(self, other)
}
}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
SlicePartialOrd::partial_compare(self, other)
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
fn equal(&self, other: &[B]) -> bool;
fn not_equal(&self, other: &[B]) -> bool
|
}
// Generic slice equality
impl<A, B> SlicePartialEq<B> for [A]
where
A: PartialEq<B>,
{
default fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().zip(other.iter()).all(|(x, y)| x == y)
}
}
// Use memcmp for bytewise equality when the types allow
impl<A, B> SlicePartialEq<B> for [A]
where
A: BytewiseEquality<B>,
{
fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
// SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
// The two slices have been checked to have the same size above.
unsafe {
let size = mem::size_of_val(self);
memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
}
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
impl<A: PartialOrd> SlicePartialOrd for A {
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].partial_cmp(&rhs[i]) {
Some(Ordering::Equal) => (),
non_eq => return non_eq,
}
}
left.len().partial_cmp(&right.len())
}
}
// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
A: Ord,
{
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
*/
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}
macro_rules! always_applicable_ord {
($([$($p:tt)*] $t:ty,)*) => {
$(impl<$($p)*> AlwaysApplicableOrd for $t {})*
}
}
always_applicable_ord! {
[] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
[] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
[] bool, [] char,
[T: ?Sized] *const T, [T: ?Sized] *mut T,
[T: AlwaysApplicableOrd] &T,
[T: AlwaysApplicableOrd] &mut T,
[T: AlwaysApplicableOrd] Option<T>,
}
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
impl<A: Ord> SliceOrd for A {
default fn compare(left: &[Self], right: &[Self]) -> Ordering {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].cmp(&rhs[i]) {
Ordering::Equal => (),
non_eq => return non_eq,
}
}
left.len().cmp(&right.len())
}
}
// memcmp compares a sequence of unsigned bytes lexicographically.
// this matches the order we want for [u8], but no others (not even [i8]).
impl SliceOrd for u8 {
#[inline]
fn compare(left: &[Self], right: &[Self]) -> Ordering {
let order =
// SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
// We use the minimum of both lengths which guarantees that both regions are
// valid for reads in that interval.
unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
if order == 0 {
left.len().cmp(&right.len())
} else if order < 0 {
Less
} else {
Greater
}
}
}
// Hack to allow specializing on `Eq` even though `Eq` has a method.
#[rustc_unsafe_specialization_marker]
trait MarkerEq<T>: PartialEq<T> {}
impl<T: Eq> MarkerEq<T> for T {}
#[doc(hidden)]
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
#[rustc_specialization_trait]
trait BytewiseEquality<T>: MarkerEq<T> + Copy {}
macro_rules! impl_marker_for {
($traitname:ident, $($ty:ty)*) => {
$(
impl $traitname<$ty> for $ty { }
)*
}
}
impl_marker_for!(BytewiseEquality,
u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
pub(super) trait SliceContains: Sized {
fn slice_contains(&self, x: &[Self]) -> bool;
}
impl<T> SliceContains for T
where
T: PartialEq,
{
default fn slice_contains(&self, x: &[Self]) -> bool {
x.iter().any(|y| *y == *self)
}
}
impl SliceContains for u8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
memchr::memchr(*self, x).is_some()
}
}
impl SliceContains for i8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
let byte = *self as u8;
// SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
// as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
// to be valid for reads for the length of the slice `x.len()`, which cannot be larger
// than `isize::MAX`. The returned slice is never mutated.
let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
memchr::memchr(byte, bytes).is_some()
}
}
|
{
!self.equal(other)
}
|
identifier_body
|
cmp.rs
|
//! Comparison traits for `[T]`.
use crate::cmp;
use crate::cmp::Ordering::{self, Greater, Less};
use crate::mem;
use super::from_raw_parts;
use super::memchr;
extern "C" {
/// Calls the implementation-provided memcmp.
///
/// Interprets the data as u8.
///
/// Returns 0 for equal, < 0 for less than and > 0 for greater
/// than.
// FIXME(#32610): Return type should be c_int
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> PartialEq<[B]> for [A]
where
A: PartialEq<B>,
{
fn eq(&self, other: &[B]) -> bool {
SlicePartialEq::equal(self, other)
}
fn ne(&self, other: &[B]) -> bool {
SlicePartialEq::not_equal(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
SliceOrd::compare(self, other)
}
}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
SlicePartialOrd::partial_compare(self, other)
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
fn equal(&self, other: &[B]) -> bool;
fn not_equal(&self, other: &[B]) -> bool {
!self.equal(other)
}
}
// Generic slice equality
impl<A, B> SlicePartialEq<B> for [A]
where
A: PartialEq<B>,
{
default fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().zip(other.iter()).all(|(x, y)| x == y)
}
}
// Use memcmp for bytewise equality when the types allow
impl<A, B> SlicePartialEq<B> for [A]
where
A: BytewiseEquality<B>,
{
fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
// SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
// The two slices have been checked to have the same size above.
unsafe {
let size = mem::size_of_val(self);
memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
}
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
impl<A: PartialOrd> SlicePartialOrd for A {
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].partial_cmp(&rhs[i]) {
|
left.len().partial_cmp(&right.len())
}
}
// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
A: Ord,
{
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
*/
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}
macro_rules! always_applicable_ord {
($([$($p:tt)*] $t:ty,)*) => {
$(impl<$($p)*> AlwaysApplicableOrd for $t {})*
}
}
always_applicable_ord! {
[] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
[] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
[] bool, [] char,
[T: ?Sized] *const T, [T: ?Sized] *mut T,
[T: AlwaysApplicableOrd] &T,
[T: AlwaysApplicableOrd] &mut T,
[T: AlwaysApplicableOrd] Option<T>,
}
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
impl<A: Ord> SliceOrd for A {
default fn compare(left: &[Self], right: &[Self]) -> Ordering {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].cmp(&rhs[i]) {
Ordering::Equal => (),
non_eq => return non_eq,
}
}
left.len().cmp(&right.len())
}
}
// memcmp compares a sequence of unsigned bytes lexicographically.
// this matches the order we want for [u8], but no others (not even [i8]).
impl SliceOrd for u8 {
#[inline]
fn compare(left: &[Self], right: &[Self]) -> Ordering {
let order =
// SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
// We use the minimum of both lengths which guarantees that both regions are
// valid for reads in that interval.
unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
if order == 0 {
left.len().cmp(&right.len())
} else if order < 0 {
Less
} else {
Greater
}
}
}
// Hack to allow specializing on `Eq` even though `Eq` has a method.
#[rustc_unsafe_specialization_marker]
trait MarkerEq<T>: PartialEq<T> {}
impl<T: Eq> MarkerEq<T> for T {}
#[doc(hidden)]
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
#[rustc_specialization_trait]
trait BytewiseEquality<T>: MarkerEq<T> + Copy {}
macro_rules! impl_marker_for {
($traitname:ident, $($ty:ty)*) => {
$(
impl $traitname<$ty> for $ty { }
)*
}
}
impl_marker_for!(BytewiseEquality,
u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
pub(super) trait SliceContains: Sized {
fn slice_contains(&self, x: &[Self]) -> bool;
}
impl<T> SliceContains for T
where
T: PartialEq,
{
default fn slice_contains(&self, x: &[Self]) -> bool {
x.iter().any(|y| *y == *self)
}
}
impl SliceContains for u8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
memchr::memchr(*self, x).is_some()
}
}
impl SliceContains for i8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
let byte = *self as u8;
// SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
// as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
// to be valid for reads for the length of the slice `x.len()`, which cannot be larger
// than `isize::MAX`. The returned slice is never mutated.
let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
memchr::memchr(byte, bytes).is_some()
}
}
|
Some(Ordering::Equal) => (),
non_eq => return non_eq,
}
}
|
random_line_split
|
cmp.rs
|
//! Comparison traits for `[T]`.
use crate::cmp;
use crate::cmp::Ordering::{self, Greater, Less};
use crate::mem;
use super::from_raw_parts;
use super::memchr;
extern "C" {
/// Calls the implementation-provided memcmp.
///
/// Interprets the data as u8.
///
/// Returns 0 for equal, < 0 for less than and > 0 for greater
/// than.
// FIXME(#32610): Return type should be c_int
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> PartialEq<[B]> for [A]
where
A: PartialEq<B>,
{
fn eq(&self, other: &[B]) -> bool {
SlicePartialEq::equal(self, other)
}
fn ne(&self, other: &[B]) -> bool {
SlicePartialEq::not_equal(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
SliceOrd::compare(self, other)
}
}
/// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
SlicePartialOrd::partial_compare(self, other)
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
fn equal(&self, other: &[B]) -> bool;
fn not_equal(&self, other: &[B]) -> bool {
!self.equal(other)
}
}
// Generic slice equality
impl<A, B> SlicePartialEq<B> for [A]
where
A: PartialEq<B>,
{
default fn
|
(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().zip(other.iter()).all(|(x, y)| x == y)
}
}
// Use memcmp for bytewise equality when the types allow
impl<A, B> SlicePartialEq<B> for [A]
where
A: BytewiseEquality<B>,
{
fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
// SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
// The two slices have been checked to have the same size above.
unsafe {
let size = mem::size_of_val(self);
memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
}
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
impl<A: PartialOrd> SlicePartialOrd for A {
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].partial_cmp(&rhs[i]) {
Some(Ordering::Equal) => (),
non_eq => return non_eq,
}
}
left.len().partial_cmp(&right.len())
}
}
// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
A: Ord,
{
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
*/
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}
macro_rules! always_applicable_ord {
($([$($p:tt)*] $t:ty,)*) => {
$(impl<$($p)*> AlwaysApplicableOrd for $t {})*
}
}
always_applicable_ord! {
[] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
[] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
[] bool, [] char,
[T: ?Sized] *const T, [T: ?Sized] *mut T,
[T: AlwaysApplicableOrd] &T,
[T: AlwaysApplicableOrd] &mut T,
[T: AlwaysApplicableOrd] Option<T>,
}
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
impl<A: Ord> SliceOrd for A {
default fn compare(left: &[Self], right: &[Self]) -> Ordering {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].cmp(&rhs[i]) {
Ordering::Equal => (),
non_eq => return non_eq,
}
}
left.len().cmp(&right.len())
}
}
// memcmp compares a sequence of unsigned bytes lexicographically.
// this matches the order we want for [u8], but no others (not even [i8]).
impl SliceOrd for u8 {
#[inline]
fn compare(left: &[Self], right: &[Self]) -> Ordering {
let order =
// SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
// We use the minimum of both lengths which guarantees that both regions are
// valid for reads in that interval.
unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
if order == 0 {
left.len().cmp(&right.len())
} else if order < 0 {
Less
} else {
Greater
}
}
}
// Hack to allow specializing on `Eq` even though `Eq` has a method.
#[rustc_unsafe_specialization_marker]
trait MarkerEq<T>: PartialEq<T> {}
impl<T: Eq> MarkerEq<T> for T {}
#[doc(hidden)]
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
#[rustc_specialization_trait]
trait BytewiseEquality<T>: MarkerEq<T> + Copy {}
macro_rules! impl_marker_for {
($traitname:ident, $($ty:ty)*) => {
$(
impl $traitname<$ty> for $ty { }
)*
}
}
impl_marker_for!(BytewiseEquality,
u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
pub(super) trait SliceContains: Sized {
fn slice_contains(&self, x: &[Self]) -> bool;
}
impl<T> SliceContains for T
where
T: PartialEq,
{
default fn slice_contains(&self, x: &[Self]) -> bool {
x.iter().any(|y| *y == *self)
}
}
impl SliceContains for u8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
memchr::memchr(*self, x).is_some()
}
}
impl SliceContains for i8 {
#[inline]
fn slice_contains(&self, x: &[Self]) -> bool {
let byte = *self as u8;
// SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
// as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
// to be valid for reads for the length of the slice `x.len()`, which cannot be larger
// than `isize::MAX`. The returned slice is never mutated.
let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
memchr::memchr(byte, bytes).is_some()
}
}
|
equal
|
identifier_name
|
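A note on the cmp.rs records above: the specialization routes `[u8]` equality and ordering through `memcmp`, while `[i8]` gets `memcmp` only for equality and never for ordering, because `memcmp` compares raw unsigned bytes. A minimal standard-library sketch of the observable behavior (not part of any record):

fn main() {
    let a: &[u8] = &[1, 2, 3];
    let b: &[u8] = &[1, 2, 4];
    assert!(a < b); // first differing byte decides
    assert!(&[1u8, 2][..] < a); // a proper prefix sorts first

    // Raw-byte (memcmp) order would put -1i8 (0xFF) after 1i8 (0x01);
    // signed ordering must therefore use the generic element loop.
    let x: &[i8] = &[-1];
    let y: &[i8] = &[1];
    assert!(x < y);
}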
sequential.rs
|
// Copyright 2015 The tiny-http Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Result as IoResult;
use std::io::{Read, Write};
use std::sync::mpsc::{Receiver, Sender};
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::mem;
pub struct SequentialReaderBuilder<R> where R: Read + Send {
inner: SequentialReaderBuilderInner<R>,
}
enum SequentialReaderBuilderInner<R> where R: Read + Send {
First(R),
NotFirst(Receiver<R>),
}
pub struct SequentialReader<R> where R: Read + Send {
inner: SequentialReaderInner<R>,
next: Sender<R>,
}
enum SequentialReaderInner<R> where R: Read + Send {
MyTurn(R),
Waiting(Receiver<R>),
Empty,
}
pub struct SequentialWriterBuilder<W> where W: Write + Send {
writer: Arc<Mutex<W>>,
next_trigger: Option<Receiver<()>>,
}
pub struct SequentialWriter<W> where W: Write + Send {
trigger: Option<Receiver<()>>,
writer: Arc<Mutex<W>>,
on_finish: Sender<()>,
}
impl<R: Read + Send> SequentialReaderBuilder<R> {
pub fn new(reader: R) -> SequentialReaderBuilder<R> {
SequentialReaderBuilder {
inner: SequentialReaderBuilderInner::First(reader),
}
}
}
impl<W: Write + Send> SequentialWriterBuilder<W> {
pub fn new(writer: W) -> SequentialWriterBuilder<W> {
SequentialWriterBuilder {
writer: Arc::new(Mutex::new(writer)),
next_trigger: None,
}
}
}
|
type Item = SequentialReader<R>;
fn next(&mut self) -> Option<SequentialReader<R>> {
let (tx, rx) = channel();
let inner = mem::replace(&mut self.inner, SequentialReaderBuilderInner::NotFirst(rx));
match inner {
SequentialReaderBuilderInner::First(reader) => {
Some(SequentialReader {
inner: SequentialReaderInner::MyTurn(reader),
next: tx,
})
},
SequentialReaderBuilderInner::NotFirst(previous) => {
Some(SequentialReader {
inner: SequentialReaderInner::Waiting(previous),
next: tx,
})
},
}
}
}
impl<W: Write + Send> Iterator for SequentialWriterBuilder<W> {
type Item = SequentialWriter<W>;
fn next(&mut self) -> Option<SequentialWriter<W>> {
let (tx, rx) = channel();
let mut next_next_trigger = Some(rx);
::std::mem::swap(&mut next_next_trigger, &mut self.next_trigger);
Some(SequentialWriter {
trigger: next_next_trigger,
writer: self.writer.clone(),
on_finish: tx,
})
}
}
impl<R: Read + Send> Read for SequentialReader<R> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
let mut reader = match self.inner {
SequentialReaderInner::MyTurn(ref mut reader) => return reader.read(buf),
SequentialReaderInner::Waiting(ref mut recv) => recv.recv().unwrap(),
SequentialReaderInner::Empty => unreachable!(),
};
let result = reader.read(buf);
self.inner = SequentialReaderInner::MyTurn(reader);
result
}
}
impl<W: Write + Send> Write for SequentialWriter<W> {
fn write(&mut self, buf: &[u8]) -> IoResult<usize> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().write(buf)
}
fn flush(&mut self) -> IoResult<()> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().flush()
}
}
impl<R> Drop for SequentialReader<R> where R: Read + Send {
fn drop(&mut self) {
let inner = mem::replace(&mut self.inner, SequentialReaderInner::Empty);
match inner {
SequentialReaderInner::MyTurn(reader) => {
self.next.send(reader).ok();
},
SequentialReaderInner::Waiting(recv) => {
let reader = recv.recv().unwrap();
self.next.send(reader).ok();
},
SequentialReaderInner::Empty => (),
}
}
}
impl<W> Drop for SequentialWriter<W> where W: Write + Send {
fn drop(&mut self) {
self.on_finish.send(()).ok();
}
}
|
impl<R: Read + Send> Iterator for SequentialReaderBuilder<R> {
|
random_line_split
|
sequential.rs
|
// Copyright 2015 The tiny-http Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Result as IoResult;
use std::io::{Read, Write};
use std::sync::mpsc::{Receiver, Sender};
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::mem;
pub struct SequentialReaderBuilder<R> where R: Read + Send {
inner: SequentialReaderBuilderInner<R>,
}
enum SequentialReaderBuilderInner<R> where R: Read + Send {
First(R),
NotFirst(Receiver<R>),
}
pub struct SequentialReader<R> where R: Read + Send {
inner: SequentialReaderInner<R>,
next: Sender<R>,
}
enum SequentialReaderInner<R> where R: Read + Send {
MyTurn(R),
Waiting(Receiver<R>),
Empty,
}
pub struct SequentialWriterBuilder<W> where W: Write + Send {
writer: Arc<Mutex<W>>,
next_trigger: Option<Receiver<()>>,
}
pub struct SequentialWriter<W> where W: Write + Send {
trigger: Option<Receiver<()>>,
writer: Arc<Mutex<W>>,
on_finish: Sender<()>,
}
impl<R: Read + Send> SequentialReaderBuilder<R> {
pub fn new(reader: R) -> SequentialReaderBuilder<R>
|
}
impl<W: Write + Send> SequentialWriterBuilder<W> {
pub fn new(writer: W) -> SequentialWriterBuilder<W> {
SequentialWriterBuilder {
writer: Arc::new(Mutex::new(writer)),
next_trigger: None,
}
}
}
impl<R: Read + Send> Iterator for SequentialReaderBuilder<R> {
type Item = SequentialReader<R>;
fn next(&mut self) -> Option<SequentialReader<R>> {
let (tx, rx) = channel();
let inner = mem::replace(&mut self.inner, SequentialReaderBuilderInner::NotFirst(rx));
match inner {
SequentialReaderBuilderInner::First(reader) => {
Some(SequentialReader {
inner: SequentialReaderInner::MyTurn(reader),
next: tx,
})
},
SequentialReaderBuilderInner::NotFirst(previous) => {
Some(SequentialReader {
inner: SequentialReaderInner::Waiting(previous),
next: tx,
})
},
}
}
}
impl<W: Write + Send> Iterator for SequentialWriterBuilder<W> {
type Item = SequentialWriter<W>;
fn next(&mut self) -> Option<SequentialWriter<W>> {
let (tx, rx) = channel();
let mut next_next_trigger = Some(rx);
::std::mem::swap(&mut next_next_trigger, &mut self.next_trigger);
Some(SequentialWriter {
trigger: next_next_trigger,
writer: self.writer.clone(),
on_finish: tx,
})
}
}
impl<R: Read + Send> Read for SequentialReader<R> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
let mut reader = match self.inner {
SequentialReaderInner::MyTurn(ref mut reader) => return reader.read(buf),
SequentialReaderInner::Waiting(ref mut recv) => recv.recv().unwrap(),
SequentialReaderInner::Empty => unreachable!(),
};
let result = reader.read(buf);
self.inner = SequentialReaderInner::MyTurn(reader);
result
}
}
impl<W: Write + Send> Write for SequentialWriter<W> {
fn write(&mut self, buf: &[u8]) -> IoResult<usize> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().write(buf)
}
fn flush(&mut self) -> IoResult<()> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().flush()
}
}
impl<R> Drop for SequentialReader<R> where R: Read + Send {
fn drop(&mut self) {
let inner = mem::replace(&mut self.inner, SequentialReaderInner::Empty);
match inner {
SequentialReaderInner::MyTurn(reader) => {
self.next.send(reader).ok();
},
SequentialReaderInner::Waiting(recv) => {
let reader = recv.recv().unwrap();
self.next.send(reader).ok();
},
SequentialReaderInner::Empty => (),
}
}
}
impl<W> Drop for SequentialWriter<W> where W: Write + Send {
fn drop(&mut self) {
self.on_finish.send(()).ok();
}
}
|
{
SequentialReaderBuilder {
inner: SequentialReaderBuilderInner::First(reader),
}
}
|
identifier_body
|
sequential.rs
|
// Copyright 2015 The tiny-http Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Result as IoResult;
use std::io::{Read, Write};
use std::sync::mpsc::{Receiver, Sender};
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::mem;
pub struct SequentialReaderBuilder<R> where R: Read + Send {
inner: SequentialReaderBuilderInner<R>,
}
enum SequentialReaderBuilderInner<R> where R: Read + Send {
First(R),
NotFirst(Receiver<R>),
}
pub struct SequentialReader<R> where R: Read + Send {
inner: SequentialReaderInner<R>,
next: Sender<R>,
}
enum SequentialReaderInner<R> where R: Read + Send {
MyTurn(R),
Waiting(Receiver<R>),
Empty,
}
pub struct SequentialWriterBuilder<W> where W: Write + Send {
writer: Arc<Mutex<W>>,
next_trigger: Option<Receiver<()>>,
}
pub struct SequentialWriter<W> where W: Write + Send {
trigger: Option<Receiver<()>>,
writer: Arc<Mutex<W>>,
on_finish: Sender<()>,
}
impl<R: Read + Send> SequentialReaderBuilder<R> {
pub fn
|
(reader: R) -> SequentialReaderBuilder<R> {
SequentialReaderBuilder {
inner: SequentialReaderBuilderInner::First(reader),
}
}
}
impl<W: Write + Send> SequentialWriterBuilder<W> {
pub fn new(writer: W) -> SequentialWriterBuilder<W> {
SequentialWriterBuilder {
writer: Arc::new(Mutex::new(writer)),
next_trigger: None,
}
}
}
impl<R: Read + Send> Iterator for SequentialReaderBuilder<R> {
type Item = SequentialReader<R>;
fn next(&mut self) -> Option<SequentialReader<R>> {
let (tx, rx) = channel();
let inner = mem::replace(&mut self.inner, SequentialReaderBuilderInner::NotFirst(rx));
match inner {
SequentialReaderBuilderInner::First(reader) => {
Some(SequentialReader {
inner: SequentialReaderInner::MyTurn(reader),
next: tx,
})
},
SequentialReaderBuilderInner::NotFirst(previous) => {
Some(SequentialReader {
inner: SequentialReaderInner::Waiting(previous),
next: tx,
})
},
}
}
}
impl<W: Write + Send> Iterator for SequentialWriterBuilder<W> {
type Item = SequentialWriter<W>;
fn next(&mut self) -> Option<SequentialWriter<W>> {
let (tx, rx) = channel();
let mut next_next_trigger = Some(rx);
::std::mem::swap(&mut next_next_trigger, &mut self.next_trigger);
Some(SequentialWriter {
trigger: next_next_trigger,
writer: self.writer.clone(),
on_finish: tx,
})
}
}
impl<R: Read + Send> Read for SequentialReader<R> {
fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
let mut reader = match self.inner {
SequentialReaderInner::MyTurn(ref mut reader) => return reader.read(buf),
SequentialReaderInner::Waiting(ref mut recv) => recv.recv().unwrap(),
SequentialReaderInner::Empty => unreachable!(),
};
let result = reader.read(buf);
self.inner = SequentialReaderInner::MyTurn(reader);
result
}
}
impl<W: Write + Send> Write for SequentialWriter<W> {
fn write(&mut self, buf: &[u8]) -> IoResult<usize> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().write(buf)
}
fn flush(&mut self) -> IoResult<()> {
self.trigger.as_mut().map(|v| v.recv().unwrap());
self.trigger = None;
self.writer.lock().unwrap().flush()
}
}
impl<R> Drop for SequentialReader<R> where R: Read + Send {
fn drop(&mut self) {
let inner = mem::replace(&mut self.inner, SequentialReaderInner::Empty);
match inner {
SequentialReaderInner::MyTurn(reader) => {
self.next.send(reader).ok();
},
SequentialReaderInner::Waiting(recv) => {
let reader = recv.recv().unwrap();
self.next.send(reader).ok();
},
SequentialReaderInner::Empty => (),
}
}
}
impl<W> Drop for SequentialWriter<W> where W: Write + Send {
fn drop(&mut self) {
self.on_finish.send(()).ok();
}
}
|
new
|
identifier_name
|
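To make the sequential.rs records above concrete: each SequentialWriter blocks its first write or flush on a channel that its predecessor's Drop fires, so output lands in creation order even across threads. A hypothetical usage sketch, assuming the types above are in scope (`demo` and the payloads are invented):

use std::io::Write;
use std::thread;

fn demo() {
    let mut builder = SequentialWriterBuilder::new(std::io::stdout());
    let mut first = builder.next().unwrap();
    let mut second = builder.next().unwrap();

    let t = thread::spawn(move || {
        // recv()s on its trigger channel until `first` is dropped
        second.write_all(b"world\n").unwrap();
    });
    first.write_all(b"hello ").unwrap(); // trigger is None: writes immediately
    drop(first); // Drop sends () on on_finish, releasing `second`
    t.join().unwrap();
}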
buffer.rs
|
use std::ops;
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Buffer {
pub uri: Option<String>,
pub name: Option<String>,
data: Vec<u8>
}
impl Buffer {
pub fn new(uri: Option<String>, name: Option<String>, data: Vec<u8>) -> Arc<Buffer> {
return Arc::new(Buffer { uri: uri, name: name, data: data });
}
#[inline(always)]
pub fn as_slice(&self) -> &[u8] {
return &self[..];
}
}
impl ops::Index<usize> for Buffer {
type Output = u8;
#[inline(always)]
fn index(&self, index: usize) -> &u8 {
return &self.data[index];
}
}
impl ops::Index<ops::Range<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::Range<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeTo<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeTo<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFrom<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFrom<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFull> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFull) -> &[u8] {
return &self.data[index];
}
}
impl ops::Deref for Buffer {
type Target = [u8];
fn deref(&self) -> &[u8] {
return self.data.deref();
}
}
|
#[test]
fn test_as_slice() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(buffer.as_slice(), &[0, 1, 2, 3]);
}
#[test]
fn test_deref() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&**buffer, &[0, 1, 2, 3]);
}
#[test]
fn test_index() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&buffer[..], &[0, 1, 2, 3]);
assert_eq!(&buffer[1..3], &[1, 2]);
assert_eq!(&buffer[..2], &[0, 1]);
assert_eq!(&buffer[2..], &[2, 3]);
}
}
|
#[cfg(test)]
mod tests {
use super::*;
|
random_line_split
|
buffer.rs
|
use std::ops;
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Buffer {
pub uri: Option<String>,
pub name: Option<String>,
data: Vec<u8>
}
impl Buffer {
pub fn new(uri: Option<String>, name: Option<String>, data: Vec<u8>) -> Arc<Buffer> {
return Arc::new(Buffer { uri: uri, name: name, data: data });
}
#[inline(always)]
pub fn
|
(&self) -> &[u8] {
return &self[..];
}
}
impl ops::Index<usize> for Buffer {
type Output = u8;
#[inline(always)]
fn index(&self, index: usize) -> &u8 {
return &self.data[index];
}
}
impl ops::Index<ops::Range<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::Range<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeTo<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeTo<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFrom<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFrom<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFull> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFull) -> &[u8] {
return &self.data[index];
}
}
impl ops::Deref for Buffer {
type Target = [u8];
fn deref(&self) -> &[u8] {
return self.data.deref();
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_as_slice() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(buffer.as_slice(), &[0, 1, 2, 3]);
}
#[test]
fn test_deref() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&**buffer, &[0, 1, 2, 3]);
}
#[test]
fn test_index() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&buffer[..], &[0, 1, 2, 3]);
assert_eq!(&buffer[1..3], &[1, 2]);
assert_eq!(&buffer[..2], &[0, 1]);
assert_eq!(&buffer[2..], &[2, 3]);
}
}
|
as_slice
|
identifier_name
|
buffer.rs
|
use std::ops;
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Buffer {
pub uri: Option<String>,
pub name: Option<String>,
data: Vec<u8>
}
impl Buffer {
pub fn new(uri: Option<String>, name: Option<String>, data: Vec<u8>) -> Arc<Buffer> {
return Arc::new(Buffer { uri: uri, name: name, data: data });
}
#[inline(always)]
pub fn as_slice(&self) -> &[u8] {
return &self[..];
}
}
impl ops::Index<usize> for Buffer {
type Output = u8;
#[inline(always)]
fn index(&self, index: usize) -> &u8 {
return &self.data[index];
}
}
impl ops::Index<ops::Range<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::Range<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeTo<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeTo<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFrom<usize>> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFrom<usize>) -> &[u8] {
return &self.data[index];
}
}
impl ops::Index<ops::RangeFull> for Buffer {
type Output = [u8];
#[inline(always)]
fn index(&self, index: ops::RangeFull) -> &[u8] {
return &self.data[index];
}
}
impl ops::Deref for Buffer {
type Target = [u8];
fn deref(&self) -> &[u8]
|
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_as_slice() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(buffer.as_slice(), &[0, 1, 2, 3]);
}
#[test]
fn test_deref() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&**buffer, &[0, 1, 2, 3]);
}
#[test]
fn test_index() {
let buffer = Buffer::new(None, None, vec![0, 1, 2, 3]);
assert_eq!(&buffer[..], &[0, 1, 2, 3]);
assert_eq!(&buffer[1..3], &[1, 2]);
assert_eq!(&buffer[..2], &[0, 1]);
assert_eq!(&buffer[2..], &[2, 3]);
}
}
|
{
return self.data.deref();
}
|
identifier_body
|
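A short usage sketch for the Buffer type above (the URI string is invented): because Buffer derefs to [u8], slice methods and all the Index impls work straight through the Arc.

fn demo() {
    let buf = Buffer::new(Some("mem://demo".to_string()), None, vec![10, 20, 30]);
    assert_eq!(buf[0], 10);          // ops::Index<usize>
    assert_eq!(&buf[1..], [20, 30]); // ops::Index<ops::RangeFrom<usize>>
    assert_eq!(buf.len(), 3);        // slice method via Deref<Target = [u8]>
}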
lib.rs
|
#![warn(clippy::pedantic)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::inline_always)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::shadow_unrelated)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::map_err_ignore)]
#![allow(clippy::upper_case_acronyms)]
#[cfg(feature = "diesel-support")]
#[macro_use]
extern crate diesel;
pub mod err;
pub mod local;
pub mod remote;
pub use err::{Error, Result};
/// Represents the type of a series.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub enum
|
{
/// TV episodes.
Season,
Movie,
Special,
OVA,
ONA,
Music,
}
impl Into<&'static str> for SeriesKind {
fn into(self) -> &'static str {
match self {
Self::Season => "Season",
Self::Movie => "Movie",
Self::Special => "Special",
Self::OVA => "OVA",
Self::ONA => "ONA",
Self::Music => "Music",
}
}
}
/// Find the best matching item in `items` via `matcher` and return it if the maximum confidence is greater than `min_confidence`.
///
/// `min_confidence` should be a value between 0.0 and 1.0.
///
/// `matcher` is used to compare each item in `items`. When returning Some, its value should be between 0.0 and 1.0.
/// This value represents the "confidence" (or similarity) between the item and some other value.
///
/// If `matcher` returns a confidence greater than 0.99, that item will be immediately returned.
pub fn closest_match<I, T, F>(items: I, min_confidence: f32, matcher: F) -> Option<(usize, T)>
where
I: IntoIterator<Item = T>,
F: Fn(&T) -> Option<f32>,
{
let mut max_score = 0.0;
let mut best_match = None;
for (i, item) in items.into_iter().enumerate() {
let score = match matcher(&item) {
Some(score) => score,
None => continue,
};
if score > max_score {
best_match = Some((i, item));
if score > 0.99 {
return best_match;
}
max_score = score;
}
}
if max_score < min_confidence {
return None;
}
best_match
}
|
SeriesKind
|
identifier_name
|
lib.rs
|
#![warn(clippy::pedantic)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::inline_always)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::shadow_unrelated)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::map_err_ignore)]
#![allow(clippy::upper_case_acronyms)]
#[cfg(feature = "diesel-support")]
#[macro_use]
extern crate diesel;
pub mod err;
pub mod local;
pub mod remote;
pub use err::{Error, Result};
/// Represents the type of a series.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub enum SeriesKind {
/// TV episodes.
Season,
Movie,
Special,
OVA,
ONA,
Music,
}
impl Into<&'static str> for SeriesKind {
fn into(self) -> &'static str {
match self {
Self::Season => "Season",
Self::Movie => "Movie",
Self::Special => "Special",
Self::OVA => "OVA",
Self::ONA => "ONA",
Self::Music => "Music",
}
}
}
/// Find the best matching item in `items` via `matcher` and return it if the maximum confidence is greater than `min_confidence`.
///
/// `min_confidence` should be a value between 0.0 and 1.0.
///
/// `matcher` is used to compare each item in `items`. When returning Some, its value should be between 0.0 and 1.0.
/// This value represents the "confidence" (or similarity) between the item and some other value.
///
/// If `matcher` returns a confidence greater than 0.99, that item will be immediately returned.
pub fn closest_match<I, T, F>(items: I, min_confidence: f32, matcher: F) -> Option<(usize, T)>
where
I: IntoIterator<Item = T>,
F: Fn(&T) -> Option<f32>,
|
if max_score < min_confidence {
return None;
}
best_match
}
|
{
let mut max_score = 0.0;
let mut best_match = None;
for (i, item) in items.into_iter().enumerate() {
let score = match matcher(&item) {
Some(score) => score,
None => continue,
};
if score > max_score {
best_match = Some((i, item));
if score > 0.99 {
return best_match;
}
max_score = score;
}
}
|
identifier_body
|
lib.rs
|
#![warn(clippy::pedantic)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::inline_always)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::shadow_unrelated)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::map_err_ignore)]
#![allow(clippy::upper_case_acronyms)]
#[cfg(feature = "diesel-support")]
#[macro_use]
extern crate diesel;
pub mod err;
pub mod local;
pub mod remote;
pub use err::{Error, Result};
/// Represents the type of a series.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub enum SeriesKind {
/// TV episodes.
Season,
Movie,
Special,
OVA,
ONA,
Music,
}
impl Into<&'static str> for SeriesKind {
fn into(self) -> &'static str {
match self {
Self::Season => "Season",
Self::Movie => "Movie",
Self::Special => "Special",
Self::OVA => "OVA",
Self::ONA => "ONA",
Self::Music => "Music",
}
}
}
/// Find the best matching item in `items` via `matcher` and return it if the maximum confidence is greater than `min_confidence`.
///
/// `min_confidence` should be a value between 0.0 and 1.0.
///
/// `matcher` is used to compare each item in `items`. When returning Some, its value should be between 0.0 and 1.0.
/// This value represents the "confidence" (or similarity) between the item and some other value.
///
/// If `matcher` returns a confidence greater than 0.99, that item will be immediately returned.
pub fn closest_match<I, T, F>(items: I, min_confidence: f32, matcher: F) -> Option<(usize, T)>
where
I: IntoIterator<Item = T>,
F: Fn(&T) -> Option<f32>,
{
let mut max_score = 0.0;
let mut best_match = None;
for (i, item) in items.into_iter().enumerate() {
let score = match matcher(&item) {
Some(score) => score,
None => continue,
};
if score > max_score {
best_match = Some((i, item));
if score > 0.99 {
return best_match;
|
max_score = score;
}
}
if max_score < min_confidence {
return None;
}
best_match
}
|
}
|
random_line_split
|
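To illustrate closest_match from the lib.rs records above, a hypothetical call that scores similarity by shared-prefix length (the titles, the query, and the 0.5 threshold are made up):

fn demo() {
    let titles = vec!["Movie", "Season", "Special"];
    let query = "Seaso";
    let hit = closest_match(titles, 0.5, |t| {
        // Fraction of the longer string covered by the common prefix.
        let common = t
            .chars()
            .zip(query.chars())
            .take_while(|(a, b)| a == b)
            .count();
        Some(common as f32 / query.len().max(t.len()) as f32)
    });
    assert_eq!(hit, Some((1, "Season"))); // 5 of 6 chars match: ~0.83 >= 0.5
}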
test_delete_stream.rs
|
extern crate serde;
extern crate serde_json;
extern crate uuid;
extern crate http_event_store as hes;
#[macro_use]
mod support;
use support::task_domain::*;
use hes::write::Event;
use hes::read::BodyEntry;
use hes::client::Client;
use hes::expected_version::ExpectedVersion;
use hes::error::ApiError::*;
|
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
assert!(client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true).is_ok());
assert!(client.delete_stream(&stream_name, ExpectedVersion::Any).is_ok());
let result = client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true);
assert_error!(StreamNotFound(..), result.unwrap_err());
}
#[test]
fn should_fail_if_expected_version_is_not_correct() {
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
let result = client.delete_stream(&stream_name, ExpectedVersion::NoStream);
assert_error!(WrongExpectedEventNumber(..), result.unwrap_err());
}
fn test_stream_name() -> String {
format!("task-{}", uuid::Uuid::new_v4().simple())
}
|
#[test]
fn should_delete_stream() {
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
|
random_line_split
|
test_delete_stream.rs
|
extern crate serde;
extern crate serde_json;
extern crate uuid;
extern crate http_event_store as hes;
#[macro_use]
mod support;
use support::task_domain::*;
use hes::write::Event;
use hes::read::BodyEntry;
use hes::client::Client;
use hes::expected_version::ExpectedVersion;
use hes::error::ApiError::*;
#[test]
fn
|
() {
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
assert!(client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true).is_ok());
assert!(client.delete_stream(&stream_name, ExpectedVersion::Any).is_ok());
let result = client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true);
assert_error!(StreamNotFound(..), result.unwrap_err());
}
#[test]
fn should_fail_if_expected_version_is_not_correct() {
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
let result = client.delete_stream(&stream_name, ExpectedVersion::NoStream);
assert_error!(WrongExpectedEventNumber(..), result.unwrap_err());
}
fn test_stream_name() -> String {
format!("task-{}", uuid::Uuid::new_v4().simple())
}
|
should_delete_stream
|
identifier_name
|
test_delete_stream.rs
|
extern crate serde;
extern crate serde_json;
extern crate uuid;
extern crate http_event_store as hes;
#[macro_use]
mod support;
use support::task_domain::*;
use hes::write::Event;
use hes::read::BodyEntry;
use hes::client::Client;
use hes::expected_version::ExpectedVersion;
use hes::error::ApiError::*;
#[test]
fn should_delete_stream() {
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
assert!(client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true).is_ok());
assert!(client.delete_stream(&stream_name, ExpectedVersion::Any).is_ok());
let result = client.read_stream_events_forward::<BodyEntry>(&stream_name, 0, 1, true);
assert_error!(StreamNotFound(..), result.unwrap_err());
}
#[test]
fn should_fail_if_expected_version_is_not_correct()
|
fn test_stream_name() -> String {
format!("task-{}", uuid::Uuid::new_v4().simple())
}
|
{
let events: Vec<Event> = vec![TaskCreated { name: "Created".to_string(), event_id: uuid::Uuid::new_v4() }.into()];
let client = Client::default();
let stream_name = test_stream_name();
client.append_to_stream(&stream_name, ExpectedVersion::NoStream, events).unwrap();
let result = client.delete_stream(&stream_name, ExpectedVersion::NoStream);
assert_error!(WrongExpectedEventNumber(..), result.unwrap_err());
}
|
identifier_body
|
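The test_delete_stream.rs records above lean on an assert_error! macro from the unshown support module; a plausible minimal definition, assuming unwrap_err() yields a value the ApiError patterns can match directly:

macro_rules! assert_error {
    ($pat:pat, $err:expr) => {
        match $err {
            $pat => (),
            other => panic!("unexpected error: {:?}", other),
        }
    };
}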
constants.rs
|
use crate::{
analysis::imports::Imports,
codegen::general::{
self, cfg_condition, cfg_deprecated, doc_alias, version_condition, version_condition_string,
},
env::Env,
file_saver, library,
};
use std::path::Path;
pub fn
|
(env: &Env, root_path: &Path, mod_rs: &mut Vec<String>) {
let path = root_path.join("constants.rs");
let mut imports = Imports::new(&env.library);
if env.analysis.constants.is_empty() {
return;
}
let sys_crate_name = env.main_sys_crate_name();
imports.add("std::ffi::CStr");
file_saver::save_to_file(path, env.config.make_backup, |w| {
general::start_comments(w, &env.config)?;
general::uses(w, env, &imports, None)?;
writeln!(w)?;
mod_rs.push("\nmod constants;".into());
for constant in &env.analysis.constants {
let type_ = env.type_(constant.typ);
if let library::Type::Fundamental(library::Fundamental::Utf8) = type_ {
cfg_deprecated(w, env, None, constant.deprecated_version, false, 0)?;
cfg_condition(w, constant.cfg_condition.as_ref(), false, 0)?;
version_condition(w, env, None, constant.version, false, 0)?;
doc_alias(w, &constant.glib_name, "", 0)?;
writeln!(
w,
"pub static {name}: once_cell::sync::Lazy<&'static str> = once_cell::sync::Lazy::new(|| \
unsafe{{CStr::from_ptr({sys_crate_name}::{c_id}).to_str().unwrap()}});",
sys_crate_name = sys_crate_name,
name = constant.name,
c_id = constant.glib_name
)?;
if let Some(cfg) = version_condition_string(env, None, constant.version, false, 0) {
mod_rs.push(cfg);
}
mod_rs.push(format!("pub use self::constants::{};", constant.name));
}
}
Ok(())
});
}
|
generate
|
identifier_name
|
constants.rs
|
use crate::{
analysis::imports::Imports,
codegen::general::{
self, cfg_condition, cfg_deprecated, doc_alias, version_condition, version_condition_string,
},
env::Env,
file_saver, library,
};
use std::path::Path;
pub fn generate(env: &Env, root_path: &Path, mod_rs: &mut Vec<String>)
|
if let library::Type::Fundamental(library::Fundamental::Utf8) = type_ {
cfg_deprecated(w, env, None, constant.deprecated_version, false, 0)?;
cfg_condition(w, constant.cfg_condition.as_ref(), false, 0)?;
version_condition(w, env, None, constant.version, false, 0)?;
doc_alias(w, &constant.glib_name, "", 0)?;
writeln!(
w,
"pub static {name}: once_cell::sync::Lazy<&'static str> = once_cell::sync::Lazy::new(|| \
unsafe{{CStr::from_ptr({sys_crate_name}::{c_id}).to_str().unwrap()}});",
sys_crate_name = sys_crate_name,
name = constant.name,
c_id = constant.glib_name
)?;
if let Some(cfg) = version_condition_string(env, None, constant.version, false, 0) {
mod_rs.push(cfg);
}
mod_rs.push(format!("pub use self::constants::{};", constant.name));
}
}
Ok(())
});
}
|
{
let path = root_path.join("constants.rs");
let mut imports = Imports::new(&env.library);
if env.analysis.constants.is_empty() {
return;
}
let sys_crate_name = env.main_sys_crate_name();
imports.add("std::ffi::CStr");
file_saver::save_to_file(path, env.config.make_backup, |w| {
general::start_comments(w, &env.config)?;
general::uses(w, env, &imports, None)?;
writeln!(w)?;
mod_rs.push("\nmod constants;".into());
for constant in &env.analysis.constants {
let type_ = env.type_(constant.typ);
|
identifier_body
|
constants.rs
|
use crate::{
analysis::imports::Imports,
codegen::general::{
self, cfg_condition, cfg_deprecated, doc_alias, version_condition, version_condition_string,
},
env::Env,
file_saver, library,
};
use std::path::Path;
pub fn generate(env: &Env, root_path: &Path, mod_rs: &mut Vec<String>) {
let path = root_path.join("constants.rs");
|
if env.analysis.constants.is_empty() {
return;
}
let sys_crate_name = env.main_sys_crate_name();
imports.add("std::ffi::CStr");
file_saver::save_to_file(path, env.config.make_backup, |w| {
general::start_comments(w, &env.config)?;
general::uses(w, env, &imports, None)?;
writeln!(w)?;
mod_rs.push("\nmod constants;".into());
for constant in &env.analysis.constants {
let type_ = env.type_(constant.typ);
if let library::Type::Fundamental(library::Fundamental::Utf8) = type_ {
cfg_deprecated(w, env, None, constant.deprecated_version, false, 0)?;
cfg_condition(w, constant.cfg_condition.as_ref(), false, 0)?;
version_condition(w, env, None, constant.version, false, 0)?;
doc_alias(w, &constant.glib_name, "", 0)?;
writeln!(
w,
"pub static {name}: once_cell::sync::Lazy<&'static str> = once_cell::sync::Lazy::new(|| \
unsafe{{CStr::from_ptr({sys_crate_name}::{c_id}).to_str().unwrap()}});",
sys_crate_name = sys_crate_name,
name = constant.name,
c_id = constant.glib_name
)?;
if let Some(cfg) = version_condition_string(env, None, constant.version, false, 0) {
mod_rs.push(cfg);
}
mod_rs.push(format!("pub use self::constants::{};", constant.name));
}
}
Ok(())
});
}
|
let mut imports = Imports::new(&env.library);
|
random_line_split
|
constants.rs
|
use crate::{
analysis::imports::Imports,
codegen::general::{
self, cfg_condition, cfg_deprecated, doc_alias, version_condition, version_condition_string,
},
env::Env,
file_saver, library,
};
use std::path::Path;
pub fn generate(env: &Env, root_path: &Path, mod_rs: &mut Vec<String>) {
let path = root_path.join("constants.rs");
let mut imports = Imports::new(&env.library);
if env.analysis.constants.is_empty() {
return;
}
let sys_crate_name = env.main_sys_crate_name();
imports.add("std::ffi::CStr");
file_saver::save_to_file(path, env.config.make_backup, |w| {
general::start_comments(w, &env.config)?;
general::uses(w, env, &imports, None)?;
writeln!(w)?;
mod_rs.push("\nmod constants;".into());
for constant in &env.analysis.constants {
let type_ = env.type_(constant.typ);
if let library::Type::Fundamental(library::Fundamental::Utf8) = type_ {
cfg_deprecated(w, env, None, constant.deprecated_version, false, 0)?;
cfg_condition(w, constant.cfg_condition.as_ref(), false, 0)?;
version_condition(w, env, None, constant.version, false, 0)?;
doc_alias(w, &constant.glib_name, "", 0)?;
writeln!(
w,
"pub static {name}: once_cell::sync::Lazy<&'static str> = once_cell::sync::Lazy::new(|| \
unsafe{{CStr::from_ptr({sys_crate_name}::{c_id}).to_str().unwrap()}});",
sys_crate_name = sys_crate_name,
name = constant.name,
c_id = constant.glib_name
)?;
if let Some(cfg) = version_condition_string(env, None, constant.version, false, 0)
|
mod_rs.push(format!("pub use self::constants::{};", constant.name));
}
}
Ok(())
});
}
|
{
mod_rs.push(cfg);
}
|
conditional_block
|
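For orientation on the constants.rs generator above: each Utf8 constant becomes a lazily initialized static. Assuming a hypothetical constant named VERSION_STRING with C identifier G_VERSION_STRING in a sys crate called ffi, the emitted item would read:

// Hypothetical generator output for one constant:
pub static VERSION_STRING: once_cell::sync::Lazy<&'static str> =
    once_cell::sync::Lazy::new(|| unsafe { CStr::from_ptr(ffi::G_VERSION_STRING).to_str().unwrap() });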
validation.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::setcommon::{add_generations_by_bonsai, BonsaiInputStream};
use crate::BonsaiNodeStream;
use anyhow::Error;
use changeset_fetcher::ArcChangesetFetcher;
use context::CoreContext;
use futures_ext::StreamExt;
use futures_old::stream::Stream;
use futures_old::{Async, Poll};
use mononoke_types::ChangesetId;
use mononoke_types::Generation;
use std::collections::HashSet;
/// A wrapper around a NodeStream that asserts that the two revset invariants hold:
/// 1. The generation number never increases
/// 2. No hash is seen twice
/// This uses memory proportional to the number of hashes in the revset.
pub struct ValidateNodeStream {
wrapped: BonsaiInputStream,
last_generation: Option<Generation>,
seen_hashes: HashSet<ChangesetId>,
}
impl ValidateNodeStream {
pub fn new(
ctx: CoreContext,
wrapped: BonsaiNodeStream,
changeset_fetcher: &ArcChangesetFetcher,
) -> ValidateNodeStream {
ValidateNodeStream {
wrapped: add_generations_by_bonsai(ctx, wrapped, changeset_fetcher.clone()).boxify(),
last_generation: None,
seen_hashes: HashSet::new(),
}
}
}
impl Stream for ValidateNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
let next = self.wrapped.poll()?;
let (hash, gen) = match next {
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => return Ok(Async::Ready(None)),
Async::Ready(Some((hash, gen))) => (hash, gen),
};
assert!(self.seen_hashes.insert(hash), "Hash {} seen twice", hash);
assert!(
self.last_generation.is_none() || self.last_generation >= Some(gen),
"Generation number increased unexpectedly"
);
self.last_generation = Some(gen);
Ok(Async::Ready(Some(hash)))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fixtures::linear;
use crate::setcommon::NotReadyEmptyStream;
use crate::tests::TestChangesetFetcher;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, stream::StreamExt as _};
use futures_ext::StreamExt;
use revset_test_helper::{assert_changesets_sequence, single_changeset_id, string_to_bonsai};
use std::sync::Arc;
#[fbinit::test]
async fn validate_accepts_single_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify();
let nodestream =
ValidateNodeStream::new(ctx.clone(), nodestream, &changeset_fetcher).boxify();
assert_changesets_sequence(ctx, &repo, vec![head_csid], nodestream).await;
}
#[fbinit::test]
async fn slow_ready_validates(fb: FacebookInit) {
// Tests that we handle an input staying at NotReady for a while without panicking
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher = Arc::new(TestChangesetFetcher::new(repo));
let mut nodestream = ValidateNodeStream::new(
ctx,
NotReadyEmptyStream::new(10).boxify(),
&changeset_fetcher,
)
.compat();
assert!(nodestream.next().await.is_none());
}
#[fbinit::test]
#[should_panic]
async fn repeat_hash_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo)
.chain(single_changeset_id(ctx.clone(), head_csid.clone(), &repo));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
#[fbinit::test]
#[should_panic]
async fn
|
(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let nodestream = single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "cb15ca4a43a59acff5388cea9648c162afde8372")
.await
.clone(),
&repo,
)
.chain(single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await,
&repo,
));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
}
|
wrong_order_panics
|
identifier_name
|
validation.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::setcommon::{add_generations_by_bonsai, BonsaiInputStream};
use crate::BonsaiNodeStream;
use anyhow::Error;
use changeset_fetcher::ArcChangesetFetcher;
use context::CoreContext;
use futures_ext::StreamExt;
use futures_old::stream::Stream;
use futures_old::{Async, Poll};
use mononoke_types::ChangesetId;
use mononoke_types::Generation;
use std::collections::HashSet;
/// A wrapper around a NodeStream that asserts that the two revset invariants hold:
/// 1. The generation number never increases
/// 2. No hash is seen twice
/// This uses memory proportional to the number of hashes in the revset.
pub struct ValidateNodeStream {
wrapped: BonsaiInputStream,
last_generation: Option<Generation>,
seen_hashes: HashSet<ChangesetId>,
}
impl ValidateNodeStream {
pub fn new(
ctx: CoreContext,
wrapped: BonsaiNodeStream,
changeset_fetcher: &ArcChangesetFetcher,
) -> ValidateNodeStream {
ValidateNodeStream {
wrapped: add_generations_by_bonsai(ctx, wrapped, changeset_fetcher.clone()).boxify(),
last_generation: None,
seen_hashes: HashSet::new(),
}
}
}
impl Stream for ValidateNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
let next = self.wrapped.poll()?;
let (hash, gen) = match next {
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => return Ok(Async::Ready(None)),
Async::Ready(Some((hash, gen))) => (hash, gen),
};
assert!(self.seen_hashes.insert(hash), "Hash {} seen twice", hash);
assert!(
self.last_generation.is_none() || self.last_generation >= Some(gen),
"Generation number increased unexpectedly"
|
self.last_generation = Some(gen);
Ok(Async::Ready(Some(hash)))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fixtures::linear;
use crate::setcommon::NotReadyEmptyStream;
use crate::tests::TestChangesetFetcher;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, stream::StreamExt as _};
use futures_ext::StreamExt;
use revset_test_helper::{assert_changesets_sequence, single_changeset_id, string_to_bonsai};
use std::sync::Arc;
#[fbinit::test]
async fn validate_accepts_single_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify();
let nodestream =
ValidateNodeStream::new(ctx.clone(), nodestream, &changeset_fetcher).boxify();
assert_changesets_sequence(ctx, &repo, vec![head_csid], nodestream).await;
}
#[fbinit::test]
async fn slow_ready_validates(fb: FacebookInit) {
// Tests that we handle an input staying at NotReady for a while without panicking
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher = Arc::new(TestChangesetFetcher::new(repo));
let mut nodestream = ValidateNodeStream::new(
ctx,
NotReadyEmptyStream::new(10).boxify(),
&changeset_fetcher,
)
.compat();
assert!(nodestream.next().await.is_none());
}
#[fbinit::test]
#[should_panic]
async fn repeat_hash_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo)
.chain(single_changeset_id(ctx.clone(), head_csid.clone(), &repo));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
#[fbinit::test]
#[should_panic]
async fn wrong_order_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let nodestream = single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "cb15ca4a43a59acff5388cea9648c162afde8372")
.await
.clone(),
&repo,
)
.chain(single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await,
&repo,
));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
}
|
);
|
random_line_split
|
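For reference, the two invariants the ValidateNodeStream samples assert can be captured in a few lines over any finite sequence. A self-contained sketch, with plain u64 values standing in for ChangesetId and Generation (a simplifying assumption):

use std::collections::HashSet;

/// Check the two revset invariants over a finite sequence of
/// (hash, generation) pairs: no hash repeats, and the generation
/// number never increases.
fn check_revset_invariants(items: &[(u64, u64)]) -> Result<(), String> {
    let mut seen = HashSet::new();
    let mut last_gen: Option<u64> = None;
    for &(hash, gen) in items {
        if !seen.insert(hash) {
            return Err(format!("hash {} seen twice", hash));
        }
        if let Some(prev) = last_gen {
            if gen > prev {
                return Err("generation number increased".to_string());
            }
        }
        last_gen = Some(gen);
    }
    Ok(())
}

fn main() {
    assert!(check_revset_invariants(&[(1, 5), (2, 5), (3, 4)]).is_ok());
    assert!(check_revset_invariants(&[(1, 4), (2, 5)]).is_err()); // gen went up
    assert!(check_revset_invariants(&[(1, 5), (1, 5)]).is_err()); // repeat hash
}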
validation.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::setcommon::{add_generations_by_bonsai, BonsaiInputStream};
use crate::BonsaiNodeStream;
use anyhow::Error;
use changeset_fetcher::ArcChangesetFetcher;
use context::CoreContext;
use futures_ext::StreamExt;
use futures_old::stream::Stream;
use futures_old::{Async, Poll};
use mononoke_types::ChangesetId;
use mononoke_types::Generation;
use std::collections::HashSet;
/// A wrapper around a NodeStream that asserts that the two revset invariants hold:
/// 1. The generation number never increases
/// 2. No hash is seen twice
/// This uses memory proportional to the number of hashes in the revset.
pub struct ValidateNodeStream {
wrapped: BonsaiInputStream,
last_generation: Option<Generation>,
seen_hashes: HashSet<ChangesetId>,
}
impl ValidateNodeStream {
pub fn new(
ctx: CoreContext,
wrapped: BonsaiNodeStream,
changeset_fetcher: &ArcChangesetFetcher,
) -> ValidateNodeStream {
ValidateNodeStream {
wrapped: add_generations_by_bonsai(ctx, wrapped, changeset_fetcher.clone()).boxify(),
last_generation: None,
seen_hashes: HashSet::new(),
}
}
}
impl Stream for ValidateNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
let next = self.wrapped.poll()?;
let (hash, gen) = match next {
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => return Ok(Async::Ready(None)),
Async::Ready(Some((hash, gen))) => (hash, gen),
};
assert!(self.seen_hashes.insert(hash), "Hash {} seen twice", hash);
assert!(
self.last_generation.is_none() || self.last_generation >= Some(gen),
"Generation number increased unexpectedly"
);
self.last_generation = Some(gen);
Ok(Async::Ready(Some(hash)))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fixtures::linear;
use crate::setcommon::NotReadyEmptyStream;
use crate::tests::TestChangesetFetcher;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, stream::StreamExt as _};
use futures_ext::StreamExt;
use revset_test_helper::{assert_changesets_sequence, single_changeset_id, string_to_bonsai};
use std::sync::Arc;
#[fbinit::test]
async fn validate_accepts_single_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify();
let nodestream =
ValidateNodeStream::new(ctx.clone(), nodestream, &changeset_fetcher).boxify();
assert_changesets_sequence(ctx, &repo, vec![head_csid], nodestream).await;
}
#[fbinit::test]
async fn slow_ready_validates(fb: FacebookInit) {
// Tests that we handle an input staying at NotReady for a while without panicking
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher = Arc::new(TestChangesetFetcher::new(repo));
let mut nodestream = ValidateNodeStream::new(
ctx,
NotReadyEmptyStream::new(10).boxify(),
&changeset_fetcher,
)
.compat();
assert!(nodestream.next().await.is_none());
}
#[fbinit::test]
#[should_panic]
async fn repeat_hash_panics(fb: FacebookInit)
|
}
}
#[fbinit::test]
#[should_panic]
async fn wrong_order_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let nodestream = single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "cb15ca4a43a59acff5388cea9648c162afde8372")
.await
.clone(),
&repo,
)
.chain(single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await,
&repo,
));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
}
|
{
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo)
.chain(single_changeset_id(ctx.clone(), head_csid.clone(), &repo));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
|
identifier_body
|
validation.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use crate::setcommon::{add_generations_by_bonsai, BonsaiInputStream};
use crate::BonsaiNodeStream;
use anyhow::Error;
use changeset_fetcher::ArcChangesetFetcher;
use context::CoreContext;
use futures_ext::StreamExt;
use futures_old::stream::Stream;
use futures_old::{Async, Poll};
use mononoke_types::ChangesetId;
use mononoke_types::Generation;
use std::collections::HashSet;
/// A wrapper around a NodeStream that asserts that the two revset invariants hold:
/// 1. The generation number never increases
/// 2. No hash is seen twice
/// This uses memory proportional to the number of hashes in the revset.
pub struct ValidateNodeStream {
wrapped: BonsaiInputStream,
last_generation: Option<Generation>,
seen_hashes: HashSet<ChangesetId>,
}
impl ValidateNodeStream {
pub fn new(
ctx: CoreContext,
wrapped: BonsaiNodeStream,
changeset_fetcher: &ArcChangesetFetcher,
) -> ValidateNodeStream {
ValidateNodeStream {
wrapped: add_generations_by_bonsai(ctx, wrapped, changeset_fetcher.clone()).boxify(),
last_generation: None,
seen_hashes: HashSet::new(),
}
}
}
impl Stream for ValidateNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
let next = self.wrapped.poll()?;
let (hash, gen) = match next {
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => return Ok(Async::Ready(None)),
Async::Ready(Some((hash, gen))) => (hash, gen),
};
assert!(self.seen_hashes.insert(hash), "Hash {} seen twice", hash);
assert!(
self.last_generation.is_none() || self.last_generation >= Some(gen),
"Generation number increased unexpectedly"
);
self.last_generation = Some(gen);
Ok(Async::Ready(Some(hash)))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fixtures::linear;
use crate::setcommon::NotReadyEmptyStream;
use crate::tests::TestChangesetFetcher;
use fbinit::FacebookInit;
use futures::{compat::Stream01CompatExt, stream::StreamExt as _};
use futures_ext::StreamExt;
use revset_test_helper::{assert_changesets_sequence, single_changeset_id, string_to_bonsai};
use std::sync::Arc;
#[fbinit::test]
async fn validate_accepts_single_node(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo).boxify();
let nodestream =
ValidateNodeStream::new(ctx.clone(), nodestream, &changeset_fetcher).boxify();
assert_changesets_sequence(ctx, &repo, vec![head_csid], nodestream).await;
}
#[fbinit::test]
async fn slow_ready_validates(fb: FacebookInit) {
// Tests that we handle an input staying at NotReady for a while without panicking
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher = Arc::new(TestChangesetFetcher::new(repo));
let mut nodestream = ValidateNodeStream::new(
ctx,
NotReadyEmptyStream::new(10).boxify(),
&changeset_fetcher,
)
.compat();
assert!(nodestream.next().await.is_none());
}
#[fbinit::test]
#[should_panic]
async fn repeat_hash_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let head_csid =
string_to_bonsai(fb, &repo, "a5ffa77602a066db7d5cfb9fb5823a0895717c5a").await;
let nodestream = single_changeset_id(ctx.clone(), head_csid.clone(), &repo)
.chain(single_changeset_id(ctx.clone(), head_csid.clone(), &repo));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ => {}
}
}
}
#[fbinit::test]
#[should_panic]
async fn wrong_order_panics(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = Arc::new(linear::getrepo(fb).await);
let nodestream = single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "cb15ca4a43a59acff5388cea9648c162afde8372")
.await
.clone(),
&repo,
)
.chain(single_changeset_id(
ctx.clone(),
string_to_bonsai(fb, &repo, "3c15267ebf11807f3d772eb891272b911ec68759").await,
&repo,
));
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new((*repo).clone()));
let mut nodestream =
ValidateNodeStream::new(ctx, nodestream.boxify(), &changeset_fetcher).boxify();
loop {
match nodestream.poll() {
Ok(Async::Ready(None)) => return,
_ =>
|
}
}
}
}
|
{}
|
conditional_block
|
mod.rs
|
// SPDX-License-Identifier: Unlicense
|
//! debug console. Other devices are managed through user-level threads
//! and by receiving the relevant capabilities which allow them to access
//! the necessary system resources - chiefly physical memory and interrupts.
pub mod intc;
pub mod serial;
pub mod virtio;
use crate::archs::arch::Arch;
use crate::archs::DeviceTrait;
use crate::pager::{
get_range, Addr, AddrRange, HandlerReturnAction, PhysAddr, PhysAddrRange, RangeContent,
};
use crate::util::locked::Locked;
use crate::{Error, Result};
use alloc::{boxed::Box, collections::BTreeMap, string::String, sync::Arc};
use dtb::{StructItem, StructItems};
/// Pointer to Device Tree Blob in physical memory, if available.
///
/// Set during reset, before memory is overwritten, so that pager can reserve and map.
pub static mut PDTB: Option<PhysAddrRange> = None;
/// Get physical memory location from direct-mapped physical DTB address (to bootstrap paging)
///
/// Unsafety: This function must only be called while physical memory is identity-mapped.
pub unsafe fn get_ram_range_early() -> Result<PhysAddrRange> {
let dtb_addr = PDTB.ok_or(Error::UnInitialised)?.base();
let reader =
dtb::Reader::read_from_address(dtb_addr.get()).or(Err(Error::DeviceIncompatible))?;
let dtb_root = reader.struct_items();
let (prop, _) = dtb_root.path_struct_items("/memory/reg").next().unwrap();
let phys_addr_range = make_addr_range(prop)?;
Ok(phys_addr_range)
}
fn make_addr_range(prop: StructItem) -> Result<PhysAddrRange> {
let mut buf = [0u8; 32];
let list = prop
.value_u32_list(&mut buf)
.or(Err(Error::DeviceIncompatible))?;
Ok(PhysAddrRange::new(
PhysAddr::fixed((list[0] as usize) << 32 | (list[1] as usize)),
(list[2] as usize) << 32 | (list[3] as usize),
))
}
#[derive(Debug)]
enum DeviceTypes {
Unknown,
Block,
}
/// Initialise the device subsystem.
///
/// Discover and register available devices by iterating through device drivers.
pub fn init() -> Result<()> {
major!("init");
let dtb_root = get_dtb_root()?;
Arch::device_init(dtb_root.clone())?;
virtio::init(dtb_root)
}
fn get_dtb_root() -> Result<StructItems<'static>> {
let virt_addr = get_range(RangeContent::DTB)?.base();
let reader = unsafe {
dtb::Reader::read_from_address(virt_addr.get()).or(Err(Error::DeviceIncompatible))?
};
Ok(reader.struct_items())
}
pub trait InterruptController: Send {
fn add_handler(&mut self, interrupt: u8, handler: fn() -> HandlerReturnAction) -> Result<()>;
}
#[derive(Copy, Clone, Debug)]
#[repr(u8)]
pub enum RequestStatus {
Ok = 0,
IOErr = 1,
Unsupp = 2,
    Init = !0,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct RequestId(u16);
#[derive(Copy, Clone, Debug)]
pub struct Sector(pub u64);
/// Functions for a block storage device
pub trait Block {
fn name(&self) -> String;
fn status(&mut self, id: RequestId) -> Result<u32>;
fn read(&mut self, page_addrs: &[PhysAddr], sector: Sector) -> Result<RequestId>;
fn write(&mut self, page_addrs: &[PhysAddr], sector: Sector) -> Result<RequestId>;
fn discard(&mut self, sector: Sector, pages: usize) -> Result<RequestId>;
fn zero(&mut self, sector: Sector, pages: usize) -> Result<RequestId>;
fn flush(&mut self) -> Result<RequestId>;
}
pub static BLOCK_DEVICES: Locked<BTreeMap<String, Arc<Locked<Box<dyn Block + Send>>>>> =
Locked::new(BTreeMap::new());
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
#[test]
fn can_call_on_test_arch() {
// init().expect("init");
}
}
|
//! A module for the kernel devices which must be used from the kernel.
//!
//! This is a very short list - interrupt controller, VM swap disk, and
|
random_line_split
|
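The make_addr_range helper above stitches the four 32-bit cells of a DTB reg property into a 64-bit base and size. A minimal standalone sketch of that bit arithmetic (u64 is used here to keep the sketch portable to 32-bit hosts):

/// Combine the four u32 cells of a DTB "reg" property
/// (base-hi, base-lo, size-hi, size-lo) into (base, size).
fn reg_to_range(cells: &[u32; 4]) -> (u64, u64) {
    let base = (u64::from(cells[0]) << 32) | u64::from(cells[1]);
    let size = (u64::from(cells[2]) << 32) | u64::from(cells[3]);
    (base, size)
}

fn main() {
    // 1 GiB of RAM at 0x4000_0000, the usual QEMU virt layout.
    let (base, size) = reg_to_range(&[0x0, 0x4000_0000, 0x0, 0x4000_0000]);
    assert_eq!(base, 0x4000_0000);
    assert_eq!(size, 0x4000_0000);
    println!("RAM at {:#x}, {:#x} bytes", base, size);
}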
mod.rs
|
// SPDX-License-Identifier: Unlicense
//! A module for the kernel devices which must be used from the kernel.
//!
//! This is a very short list - interrupt controller, VM swap disk, and
//! debug console. Other devices are managed through user-level threads
//! and by receiving the relevant capabilities which allow them to access
//! the necessary system resources - chiefly physical memory and interrupts.
pub mod intc;
pub mod serial;
pub mod virtio;
use crate::archs::arch::Arch;
use crate::archs::DeviceTrait;
use crate::pager::{
get_range, Addr, AddrRange, HandlerReturnAction, PhysAddr, PhysAddrRange, RangeContent,
};
use crate::util::locked::Locked;
use crate::{Error, Result};
use alloc::{boxed::Box, collections::BTreeMap, string::String, sync::Arc};
use dtb::{StructItem, StructItems};
/// Pointer to Device Tree Blob in physical memory, if available.
///
/// Set during reset, before memory is overwritten, so that pager can reserve and map.
pub static mut PDTB: Option<PhysAddrRange> = None;
/// Get physical memory location from direct-mapped physical DTB address (to bootstrap paging)
///
/// Unsafety: This function must only be called while physical memory is identity-mapped.
pub unsafe fn get_ram_range_early() -> Result<PhysAddrRange> {
let dtb_addr = PDTB.ok_or(Error::UnInitialised)?.base();
let reader =
dtb::Reader::read_from_address(dtb_addr.get()).or(Err(Error::DeviceIncompatible))?;
let dtb_root = reader.struct_items();
let (prop, _) = dtb_root.path_struct_items("/memory/reg").next().unwrap();
let phys_addr_range = make_addr_range(prop)?;
Ok(phys_addr_range)
}
fn make_addr_range(prop: StructItem) -> Result<PhysAddrRange> {
let mut buf = [0u8; 32];
let list = prop
.value_u32_list(&mut buf)
.or(Err(Error::DeviceIncompatible))?;
Ok(PhysAddrRange::new(
PhysAddr::fixed((list[0] as usize) << 32 | (list[1] as usize)),
(list[2] as usize) << 32 | (list[3] as usize),
))
}
#[derive(Debug)]
enum DeviceTypes {
Unknown,
Block,
}
/// Initialise the device subsystem.
///
/// Discover and register available devices by iterating through device drivers.
pub fn
|
() -> Result<()> {
major!("init");
let dtb_root = get_dtb_root()?;
Arch::device_init(dtb_root.clone())?;
virtio::init(dtb_root)
}
fn get_dtb_root() -> Result<StructItems<'static>> {
let virt_addr = get_range(RangeContent::DTB)?.base();
let reader = unsafe {
dtb::Reader::read_from_address(virt_addr.get()).or(Err(Error::DeviceIncompatible))?
};
Ok(reader.struct_items())
}
pub trait InterruptController: Send {
fn add_handler(&mut self, interrupt: u8, handler: fn() -> HandlerReturnAction) -> Result<()>;
}
#[derive(Copy, Clone, Debug)]
#[repr(u8)]
pub enum RequestStatus {
Ok = 0,
IOErr = 1,
Unsupp = 2,
    Init = !0,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct RequestId(u16);
#[derive(Copy, Clone, Debug)]
pub struct Sector(pub u64);
/// Functions for a block storage device
pub trait Block {
fn name(&self) -> String;
fn status(&mut self, id: RequestId) -> Result<u32>;
fn read(&mut self, page_addrs: &[PhysAddr], sector: Sector) -> Result<RequestId>;
fn write(&mut self, page_addrs: &[PhysAddr], sector: Sector) -> Result<RequestId>;
fn discard(&mut self, sector: Sector, pages: usize) -> Result<RequestId>;
fn zero(&mut self, sector: Sector, pages: usize) -> Result<RequestId>;
fn flush(&mut self) -> Result<RequestId>;
}
pub static BLOCK_DEVICES: Locked<BTreeMap<String, Arc<Locked<Box<dyn Block + Send>>>>> =
Locked::new(BTreeMap::new());
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
#[test]
fn can_call_on_test_arch() {
// init().expect("init");
}
}
|
init
|
identifier_name
|
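The BLOCK_DEVICES map above keeps named block devices behind a lock as shared trait objects. A userspace sketch of the same pattern, with std's Mutex standing in for the kernel's Locked wrapper (an assumption; the original is no_std and uses its own lock type):

use std::collections::BTreeMap;
use std::sync::{Arc, Mutex};

/// A minimal stand-in for the kernel's Block trait.
trait Block {
    fn name(&self) -> String;
}

struct RamDisk;

impl Block for RamDisk {
    fn name(&self) -> String {
        "ram0".to_string()
    }
}

fn main() {
    // Named devices behind locks, as BLOCK_DEVICES does.
    let mut devices: BTreeMap<String, Arc<Mutex<Box<dyn Block + Send>>>> =
        BTreeMap::new();
    let dev: Arc<Mutex<Box<dyn Block + Send>>> =
        Arc::new(Mutex::new(Box::new(RamDisk)));
    let name = dev.lock().unwrap().name();
    devices.insert(name, dev);
    assert!(devices.contains_key("ram0"));
}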
jobs_load.rs
|
use db;
use elephant;
use json_loader_elephant::json_loader_elephant;
use rusqlite::Connection;
use rustc_serialize::json::Json;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
pub fn listy2(directory: &str) -> Vec<String> {
    let mut items = Vec::<String>::new();
    let path = Path::new(directory);
    if !path.exists() {
        return items;
    }
    if !path.is_dir() {
return items;
}
for entry in path.read_dir().expect("read_dir call failed") {
if let Ok(entry) = entry {
let path = entry.path();
if path.is_dir() {
continue;
}
|
}
return items;
}
pub fn loader(name: &str) -> String {
let mut f = File::open(name).expect("file not found");
let mut contents = String::new();
f.read_to_string(&mut contents)
.expect("something went wrong reading the file");
return contents;
}
pub fn json_loader_name(conn: &Connection, pk_file: &i32, content: &str) {
let json = Json::from_str(&content);
match json {
Ok(json) => {
json_loader_elephant(conn, &pk_file, &json);
//json_loader_elephant_deps_depth1(conn, &pk_file, &json);
}
Err(_) => {}
}
}
pub fn load(conn: &Connection) {
for directory in db::list_fs_dir(&conn) {
let foo = directory.id;
let foo_name = directory.name;
let file_list = listy2(&foo_name);
for fnccc in file_list {
//println!("An input file: {}", fnccc);
let s1 = fnccc.clone();
let _ = db::insert_fs_file(&conn, foo, fnccc);
let mut pk = 10;
db::pk_fs_file_by_name(&conn, s1, &mut pk);
//println!("ssss: {}", pk);
}
}
let mut scores = HashMap::new();
let str_job_files_list = String::from("job_files");
let result_dir_type = elephant::elephant_directory_type(&conn, &str_job_files_list);
if result_dir_type.is_err() {
return;
}
let pk_directory_type_jobs = result_dir_type.unwrap();
for filename in db::list_fs_file_type(&conn, &pk_directory_type_jobs) {
let name = String::from(filename.name);
let name2 = name.clone();
let loader_rc = loader(name2.trim());
scores.insert(name, loader_rc);
}
// iterate over everything.
for (filename, contents) in &scores {
let mut pkfsfile: i32 = 0;
let filename_str = filename.clone();
let _ = db::pk_fs_file_by_name(&conn, filename_str, &mut pkfsfile);
let _ = json_loader_name(&conn, &pkfsfile, &contents);
}
db::variable_pair_list(&conn);
}
|
let pb_2 = path.as_path();
let as_path_buf = pb_2.as_os_str();
let path = String::new() + as_path_buf.to_str().unwrap();
items.push(path);
}
|
random_line_split
|
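listy2 above walks a single directory and collects its non-directory entries as strings. An equivalent, more idiomatic sketch using iterator adapters (behavior matches the original for valid UTF-8 paths; non-UTF-8 names are skipped here rather than panicking):

use std::fs;
use std::path::Path;

/// Collect the files (not subdirectories) directly under `directory`.
fn list_files(directory: &str) -> Vec<String> {
    let path = Path::new(directory);
    if !path.is_dir() {
        return Vec::new();
    }
    fs::read_dir(path)
        .map(|entries| {
            entries
                .filter_map(|entry| entry.ok())
                .map(|entry| entry.path())
                .filter(|p| !p.is_dir())
                // Skip (rather than panic on) non-UTF-8 file names.
                .filter_map(|p| p.to_str().map(str::to_string))
                .collect()
        })
        .unwrap_or_default()
}

fn main() {
    for file in list_files(".") {
        println!("{}", file);
    }
}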
jobs_load.rs
|
use db;
use elephant;
use json_loader_elephant::json_loader_elephant;
use rusqlite::Connection;
use rustc_serialize::json::Json;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
pub fn listy2(directory: &str) -> Vec<String>
|
}
}
return items;
}
pub fn loader(name: &str) -> String {
let mut f = File::open(name).expect("file not found");
let mut contents = String::new();
f.read_to_string(&mut contents)
.expect("something went wrong reading the file");
return contents;
}
pub fn json_loader_name(conn: &Connection, pk_file: &i32, content: &str) {
let json = Json::from_str(&content);
match json {
Ok(json) => {
json_loader_elephant(conn, &pk_file, &json);
//json_loader_elephant_deps_depth1(conn, &pk_file, &json);
}
Err(_) => {}
}
}
pub fn load(conn: &Connection) {
for directory in db::list_fs_dir(&conn) {
let foo = directory.id;
let foo_name = directory.name;
let file_list = listy2(&foo_name);
for fnccc in file_list {
//println!("An input file: {}", fnccc);
let s1 = fnccc.clone();
let _ = db::insert_fs_file(&conn, foo, fnccc);
let mut pk = 10;
db::pk_fs_file_by_name(&conn, s1, &mut pk);
//println!("ssss: {}", pk);
}
}
let mut scores = HashMap::new();
let str_job_files_list = String::from("job_files");
let result_dir_type = elephant::elephant_directory_type(&conn, &str_job_files_list);
if result_dir_type.is_err() {
return;
}
let pk_directory_type_jobs = result_dir_type.unwrap();
for filename in db::list_fs_file_type(&conn, &pk_directory_type_jobs) {
let name = String::from(filename.name);
let name2 = name.clone();
let loader_rc = loader(name2.trim());
scores.insert(name, loader_rc);
}
// iterate over everything.
for (filename, contents) in &scores {
let mut pkfsfile: i32 = 0;
let filename_str = filename.clone();
let _ = db::pk_fs_file_by_name(&conn, filename_str, &mut pkfsfile);
let _ = json_loader_name(&conn, &pkfsfile, &contents);
}
db::variable_pair_list(&conn);
}
|
{
let mut items = Vec::<String>::new();
    let path = Path::new(directory);
    if !path.exists() {
        return items;
    }
    if !path.is_dir() {
return items;
}
for entry in path.read_dir().expect("read_dir call failed") {
if let Ok(entry) = entry {
let path = entry.path();
if path.is_dir() {
continue;
}
let pb_2 = path.as_path();
let as_path_buf = pb_2.as_os_str();
let path = String::new() + as_path_buf.to_str().unwrap();
items.push(path);
|
identifier_body
|
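The loader helper above reads a whole file into a String and panics on failure. The same operation fits in one call to fs::read_to_string; this sketch surfaces the error to the caller instead of panicking (a deliberate behavior change, not the original semantics, and "jobs.json" is a made-up example path):

use std::fs;
use std::io;

/// Read a file to a String, returning I/O errors instead of panicking.
fn load_contents(name: &str) -> io::Result<String> {
    fs::read_to_string(name)
}

fn main() {
    // "jobs.json" is a hypothetical example path.
    match load_contents("jobs.json") {
        Ok(contents) => println!("read {} bytes", contents.len()),
        Err(e) => eprintln!("couldn't read jobs.json: {}", e),
    }
}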
jobs_load.rs
|
use db;
use elephant;
use json_loader_elephant::json_loader_elephant;
use rusqlite::Connection;
use rustc_serialize::json::Json;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
pub fn listy2(directory: &str) -> Vec<String> {
    let mut items = Vec::<String>::new();
    let path = Path::new(directory);
    if !path.exists() {
        return items;
    }
    if !path.is_dir() {
return items;
}
for entry in path.read_dir().expect("read_dir call failed") {
if let Ok(entry) = entry {
let path = entry.path();
if path.is_dir() {
continue;
}
let pb_2 = path.as_path();
let as_path_buf = pb_2.as_os_str();
let path = String::new() + as_path_buf.to_str().unwrap();
items.push(path);
}
}
return items;
}
pub fn
|
(name: &str) -> String {
let mut f = File::open(name).expect("file not found");
let mut contents = String::new();
f.read_to_string(&mut contents)
.expect("something went wrong reading the file");
return contents;
}
pub fn json_loader_name(conn: &Connection, pk_file: &i32, content: &str) {
let json = Json::from_str(&content);
match json {
Ok(json) => {
json_loader_elephant(conn, &pk_file, &json);
//json_loader_elephant_deps_depth1(conn, &pk_file, &json);
}
Err(_) => {}
}
}
pub fn load(conn: &Connection) {
for directory in db::list_fs_dir(&conn) {
let foo = directory.id;
let foo_name = directory.name;
let file_list = listy2(&foo_name);
for fnccc in file_list {
//println!("An input file: {}", fnccc);
let s1 = fnccc.clone();
let _ = db::insert_fs_file(&conn, foo, fnccc);
let mut pk = 10;
db::pk_fs_file_by_name(&conn, s1, &mut pk);
//println!("ssss: {}", pk);
}
}
let mut scores = HashMap::new();
let str_job_files_list = String::from("job_files");
let result_dir_type = elephant::elephant_directory_type(&conn, &str_job_files_list);
if result_dir_type.is_err() {
return;
}
let pk_directory_type_jobs = result_dir_type.unwrap();
for filename in db::list_fs_file_type(&conn, &pk_directory_type_jobs) {
let name = String::from(filename.name);
let name2 = name.clone();
let loader_rc = loader(name2.trim());
scores.insert(name, loader_rc);
}
// iterate over everything.
for (filename, contents) in &scores {
let mut pkfsfile: i32 = 0;
let filename_str = filename.clone();
let _ = db::pk_fs_file_by_name(&conn, filename_str, &mut pkfsfile);
let _ = json_loader_name(&conn, &pkfsfile, &contents);
}
db::variable_pair_list(&conn);
}
|
loader
|
identifier_name
|
lib.rs
|
according to those terms.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc", issue = "27812")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(dynamic_lib)]
#![feature(libc)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(slice_patterns)]
#![feature(staged_api)]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_push_all)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::process;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap_or(101);
process::exit(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(&args[1..], &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
    }
    if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
rustc_driver::monitor(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
});
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn
|
(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
                    if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
|
json_input
|
identifier_name
|
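parse_externs above splits each --extern argument once on '=' and accumulates paths per crate name. A standalone sketch of the same parsing (note that splitn never yields None for the first piece, so the empty-name check here tests for an empty string instead of the dead None arm in the original):

use std::collections::HashMap;

/// Parse `NAME=PATH` pairs, accumulating paths per crate name.
fn parse_externs(args: &[&str]) -> Result<HashMap<String, Vec<String>>, String> {
    let mut externs: HashMap<String, Vec<String>> = HashMap::new();
    for arg in args {
        let mut parts = arg.splitn(2, '=');
        let name = parts
            .next()
            .filter(|s| !s.is_empty())
            .ok_or_else(|| "--extern value must not be empty".to_string())?;
        let location = parts
            .next()
            .ok_or_else(|| "--extern value must be of the format `foo=bar`".to_string())?;
        externs
            .entry(name.to_string())
            .or_default()
            .push(location.to_string());
    }
    Ok(externs)
}

fn main() {
    let map = parse_externs(&["serde=libserde.rlib"]).unwrap();
    assert_eq!(map["serde"], vec!["libserde.rlib".to_string()]);
}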
lib.rs
|
except according to those terms.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc", issue = "27812")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(dynamic_lib)]
#![feature(libc)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(slice_patterns)]
#![feature(staged_api)]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_push_all)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::process;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap_or(101);
process::exit(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(&args[1..], &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
    }
    if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
    let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
rustc_driver::monitor(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
});
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
                    if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()> {
|
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
|
// {
|
random_line_split
|
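The comment block in json_output documents the top-level JSON envelope it writes. A sketch of building that shape with serde_json (an assumed dependency for illustration; the original predates serde and uses the rustc serialize crate, and the schema version string below is made up):

use serde_json::{json, Value};

/// Build the { "schema", "crate", "plugins" } envelope json_output writes.
fn build_output(schema: &str, krate: Value, plugins: Value) -> Value {
    json!({
        "schema": schema,
        "crate": krate,
        "plugins": plugins,
    })
}

fn main() {
    // "0.8.3" is a made-up schema version for the example.
    let out = build_output("0.8.3", json!({"name": "demo"}), json!({}));
    println!("{}", out);
}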
test.rs
|
/*
|
- [ ] Dimension
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Pair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] EqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] NeqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DimensionN
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Context
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DirectedContext
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Permutation
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] PowerSet
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
*/
extern crate discrete;
use discrete::*;
fn main() {
let dir: Context<Of<Context>> = Construct::new();
let ref dim = vec![vec![2, 2], vec![2, 2]];
let count = dir.count(dim);
let mut pos = dir.zero(dim);
for i in 0..count {
dir.to_pos(dim, i, &mut pos);
print!("{:?}", pos);
println!(" => {}", dir.to_index(dim, &pos));
}
println!("count {}", count);
}
|
Zero - implement uninitialized element for all spaces.
First, implement all lacking combinations.
|
random_line_split
|
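The discrete-crate demo above round-trips every index through to_pos and back through to_index. The same count/to_pos/to_index contract can be illustrated with a plain mixed-radix space, independent of the crate (a simplified analog, not the crate's API):

/// Total number of positions in a mixed-radix space.
fn count(dims: &[usize]) -> usize {
    dims.iter().product()
}

/// Decode a flat index into per-dimension coordinates
/// (least significant dimension first).
fn to_pos(dims: &[usize], mut index: usize, pos: &mut [usize]) {
    for (i, &d) in dims.iter().enumerate() {
        pos[i] = index % d;
        index /= d;
    }
}

/// Re-encode coordinates into the flat index; inverse of to_pos.
fn to_index(dims: &[usize], pos: &[usize]) -> usize {
    dims.iter().zip(pos).rev().fold(0, |acc, (&d, &p)| acc * d + p)
}

fn main() {
    let dims = [2, 2, 2];
    let mut pos = [0; 3];
    for i in 0..count(&dims) {
        to_pos(&dims, i, &mut pos);
        assert_eq!(to_index(&dims, &pos), i);
        println!("{:?} => {}", pos, i);
    }
}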
test.rs
|
/*
Zero - implement uninitialized element for all spaces.
First, implement all lacking combinations.
- [ ] Dimension
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Pair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] EqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] NeqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DimensionN
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Context
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DirectedContext
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Permutation
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] PowerSet
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
*/
extern crate discrete;
use discrete::*;
fn main()
|
{
let dir: Context<Of<Context>> = Construct::new();
let ref dim = vec![vec![2, 2], vec![2, 2]];
let count = dir.count(dim);
let mut pos = dir.zero(dim);
for i in 0..count {
dir.to_pos(dim, i, &mut pos);
print!("{:?}", pos);
println!(" => {}", dir.to_index(dim, &pos));
}
println!("count {}", count);
}
|
identifier_body
|
|
test.rs
|
/*
Zero - implement uninitialized element for all spaces.
First, implement all lacking combinations.
- [ ] Dimension
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Pair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] EqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] NeqPair
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DimensionN
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Context
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] DirectedContext
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] Permutation
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
- [ ] PowerSet
- [x] Data
- [x] Subspace<T>
- [x] Of<T>
*/
extern crate discrete;
use discrete::*;
fn
|
() {
let dir: Context<Of<Context>> = Construct::new();
let ref dim = vec![vec![2, 2], vec![2, 2]];
let count = dir.count(dim);
let mut pos = dir.zero(dim);
for i in 0..count {
dir.to_pos(dim, i, &mut pos);
print!("{:?}", pos);
println!(" => {}", dir.to_index(dim, &pos));
}
println!("count {}", count);
}
|
main
|
identifier_name
|
session.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use crate::kerberos::*;
use crate::smb::smb::*;
use crate::smb::smb1_session::*;
|
#[derive(Debug)]
pub struct SMBTransactionSessionSetup {
pub request_host: Option<SessionSetupRequest>,
pub response_host: Option<SessionSetupResponse>,
pub ntlmssp: Option<NtlmsspData>,
pub krb_ticket: Option<Kerberos5Ticket>,
}
impl SMBTransactionSessionSetup {
pub fn new() -> SMBTransactionSessionSetup {
return SMBTransactionSessionSetup {
request_host: None,
response_host: None,
ntlmssp: None,
krb_ticket: None,
}
}
}
impl SMBState {
pub fn new_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> &mut SMBTransaction
{
let mut tx = self.new_tx();
tx.hdr = hdr;
tx.type_data = Some(SMBTransactionTypeData::SESSIONSETUP(
SMBTransactionSessionSetup::new()));
tx.request_done = true;
tx.response_done = self.tc_trunc; // no response expected if tc is truncated
SCLogDebug!("SMB: TX SESSIONSETUP created: ID {}", tx.id);
self.transactions.push(tx);
let tx_ref = self.transactions.last_mut();
return tx_ref.unwrap();
}
pub fn get_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> Option<&mut SMBTransaction>
{
for tx in &mut self.transactions {
let hit = tx.hdr.compare(&hdr) && match tx.type_data {
Some(SMBTransactionTypeData::SESSIONSETUP(_)) => { true },
_ => { false },
};
if hit {
return Some(tx);
}
}
return None;
}
}
|
use crate::smb::auth::*;
|
random_line_split
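A hedged sketch of the get-or-create pattern these two methods support; it assumes SMBCommonHdr implements Clone, which the record does not show.

fn session_setup_tx(state: &mut SMBState, hdr: SMBCommonHdr) -> &mut SMBTransaction {
    // Reuse an in-flight SESSIONSETUP transaction when one matches the header.
    if state.get_sessionsetup_tx(hdr.clone()).is_some() {
        return state.get_sessionsetup_tx(hdr).unwrap();
    }
    // Otherwise create a fresh one (new_sessionsetup_tx pushes it and returns it).
    state.new_sessionsetup_tx(hdr)
}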
|
session.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use crate::kerberos::*;
use crate::smb::smb::*;
use crate::smb::smb1_session::*;
use crate::smb::auth::*;
#[derive(Debug)]
pub struct SMBTransactionSessionSetup {
pub request_host: Option<SessionSetupRequest>,
pub response_host: Option<SessionSetupResponse>,
pub ntlmssp: Option<NtlmsspData>,
pub krb_ticket: Option<Kerberos5Ticket>,
}
impl SMBTransactionSessionSetup {
pub fn new() -> SMBTransactionSessionSetup {
return SMBTransactionSessionSetup {
request_host: None,
response_host: None,
ntlmssp: None,
krb_ticket: None,
}
}
}
impl SMBState {
pub fn new_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> &mut SMBTransaction
{
let mut tx = self.new_tx();
tx.hdr = hdr;
tx.type_data = Some(SMBTransactionTypeData::SESSIONSETUP(
SMBTransactionSessionSetup::new()));
tx.request_done = true;
tx.response_done = self.tc_trunc; // no response expected if tc is truncated
SCLogDebug!("SMB: TX SESSIONSETUP created: ID {}", tx.id);
self.transactions.push(tx);
let tx_ref = self.transactions.last_mut();
return tx_ref.unwrap();
}
pub fn get_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> Option<&mut SMBTransaction>
{
for tx in &mut self.transactions {
let hit = tx.hdr.compare(&hdr) && match tx.type_data {
Some(SMBTransactionTypeData::SESSIONSETUP(_)) => { true },
_ =>
|
,
};
if hit {
return Some(tx);
}
}
return None;
}
}
|
{ false }
|
conditional_block
|
session.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use crate::kerberos::*;
use crate::smb::smb::*;
use crate::smb::smb1_session::*;
use crate::smb::auth::*;
#[derive(Debug)]
pub struct SMBTransactionSessionSetup {
pub request_host: Option<SessionSetupRequest>,
pub response_host: Option<SessionSetupResponse>,
pub ntlmssp: Option<NtlmsspData>,
pub krb_ticket: Option<Kerberos5Ticket>,
}
impl SMBTransactionSessionSetup {
pub fn new() -> SMBTransactionSessionSetup {
return SMBTransactionSessionSetup {
request_host: None,
response_host: None,
ntlmssp: None,
krb_ticket: None,
}
}
}
impl SMBState {
pub fn new_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> &mut SMBTransaction
{
let mut tx = self.new_tx();
tx.hdr = hdr;
tx.type_data = Some(SMBTransactionTypeData::SESSIONSETUP(
SMBTransactionSessionSetup::new()));
tx.request_done = true;
tx.response_done = self.tc_trunc; // no response expected if tc is truncated
SCLogDebug!("SMB: TX SESSIONSETUP created: ID {}", tx.id);
self.transactions.push(tx);
let tx_ref = self.transactions.last_mut();
return tx_ref.unwrap();
}
pub fn get_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> Option<&mut SMBTransaction>
|
}
|
{
for tx in &mut self.transactions {
let hit = tx.hdr.compare(&hdr) && match tx.type_data {
Some(SMBTransactionTypeData::SESSIONSETUP(_)) => { true },
_ => { false },
};
if hit {
return Some(tx);
}
}
return None;
}
|
identifier_body
|
session.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use crate::kerberos::*;
use crate::smb::smb::*;
use crate::smb::smb1_session::*;
use crate::smb::auth::*;
#[derive(Debug)]
pub struct
|
{
pub request_host: Option<SessionSetupRequest>,
pub response_host: Option<SessionSetupResponse>,
pub ntlmssp: Option<NtlmsspData>,
pub krb_ticket: Option<Kerberos5Ticket>,
}
impl SMBTransactionSessionSetup {
pub fn new() -> SMBTransactionSessionSetup {
return SMBTransactionSessionSetup {
request_host: None,
response_host: None,
ntlmssp: None,
krb_ticket: None,
}
}
}
impl SMBState {
pub fn new_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> &mut SMBTransaction
{
let mut tx = self.new_tx();
tx.hdr = hdr;
tx.type_data = Some(SMBTransactionTypeData::SESSIONSETUP(
SMBTransactionSessionSetup::new()));
tx.request_done = true;
tx.response_done = self.tc_trunc; // no response expected if tc is truncated
SCLogDebug!("SMB: TX SESSIONSETUP created: ID {}", tx.id);
self.transactions.push(tx);
let tx_ref = self.transactions.last_mut();
return tx_ref.unwrap();
}
pub fn get_sessionsetup_tx(&mut self, hdr: SMBCommonHdr)
-> Option<&mut SMBTransaction>
{
for tx in &mut self.transactions {
let hit = tx.hdr.compare(&hdr) && match tx.type_data {
Some(SMBTransactionTypeData::SESSIONSETUP(_)) => { true },
_ => { false },
};
if hit {
return Some(tx);
}
}
return None;
}
}
|
SMBTransactionSessionSetup
|
identifier_name
|
node.rs
|
use common::dataset::DataSet;
use expr::Expr;
#[derive(Clone)]
pub struct PlanNode {
pub id: u32,
pub decl: NodeDecl,
}
#[derive(Clone)]
pub enum NodeDecl {
|
Join(Box<PlanNode>, Box<PlanNode>, JoinDecl),
Project(Box<PlanNode>, Vec<Expr>), // child and exprs
Filter(Box<PlanNode>, Vec<Expr>), // child and bool exprs
Aggregate(Box<PlanNode>, AggDecl), // child and decl
Head(Box<PlanNode>, usize), // child and row number
Tail(Box<PlanNode>, usize), // child and row number
}
/// Partitioned Table description
#[derive(Clone)]
pub struct PartitionedRelDecl;
#[derive(Clone)]
pub struct DerivedRelDecl {
block_id: u32,
exprs: Option<Vec<Expr>>,
}
#[derive(Clone)]
pub struct JoinDecl {
cond: Option<Vec<Expr>>,
filter: Option<Vec<Expr>>,
}
#[derive(Clone)]
pub struct AggDecl {
keys: Vec<Expr>,
aggrs: Vec<Expr>,
}
|
Relation(DataSet),
PartitionedRelation(PartitionedRelDecl),
DerivedRelation(DerivedRelDecl),
|
random_line_split
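NodeDecl above is a classic recursive plan tree: leaf relations, unary operators carrying one boxed child, and Join carrying two. A hedged traversal sketch (not part of the record) that touches exactly the variants shown:

fn depth(node: &PlanNode) -> usize {
    match node.decl {
        // Leaves: any of the three relation forms.
        NodeDecl::Relation(_)
        | NodeDecl::PartitionedRelation(_)
        | NodeDecl::DerivedRelation(_) => 1,
        // Binary: Join takes the deeper side.
        NodeDecl::Join(ref l, ref r, _) => 1 + depth(l).max(depth(r)),
        // Unary operators all carry a single boxed child.
        NodeDecl::Project(ref c, _) | NodeDecl::Filter(ref c, _) => 1 + depth(c),
        NodeDecl::Aggregate(ref c, _) => 1 + depth(c),
        NodeDecl::Head(ref c, _) | NodeDecl::Tail(ref c, _) => 1 + depth(c),
    }
}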
|
node.rs
|
use common::dataset::DataSet;
use expr::Expr;
#[derive(Clone)]
pub struct PlanNode {
pub id: u32,
pub decl: NodeDecl,
}
#[derive(Clone)]
pub enum NodeDecl {
Relation(DataSet),
PartitionedRelation(PartitionedRelDecl),
DerivedRelation(DerivedRelDecl),
Join(Box<PlanNode>, Box<PlanNode>, JoinDecl),
Project(Box<PlanNode>, Vec<Expr>), // child and exprs
Filter(Box<PlanNode>, Vec<Expr>), // child and bool exprs
Aggregate(Box<PlanNode>, AggDecl), // child and decl
Head(Box<PlanNode>, usize), // child and row number
Tail(Box<PlanNode>, usize), // child and row number
}
/// Partitioned Table description
#[derive(Clone)]
pub struct PartitionedRelDecl;
#[derive(Clone)]
pub struct DerivedRelDecl {
block_id: u32,
exprs: Option<Vec<Expr>>,
}
#[derive(Clone)]
pub struct JoinDecl {
cond: Option<Vec<Expr>>,
filter: Option<Vec<Expr>>,
}
#[derive(Clone)]
pub struct
|
{
keys: Vec<Expr>,
aggrs: Vec<Expr>,
}
|
AggDecl
|
identifier_name
|
glb.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::equate::Equate;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::region_inference::RegionMark;
use syntax::ast::{Many, Once, MutImmutable, MutMutable};
use syntax::ast::{NormalFn, UnsafeFn, NodeId};
use syntax::ast::{Onceness, FnStyle};
use std::collections::HashMap;
use util::common::{indenter};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
/// "Greatest lower bound" (common subtype)
pub struct Glb<'f, 'tcx: 'f> {
fields: CombineFields<'f, 'tcx>
}
#[allow(non_snake_case)]
pub fn Glb<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Glb<'f, 'tcx> {
Glb { fields: cf }
}
impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx>
|
fn tag(&self) -> String { "glb".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace { self.fields.trace.clone() }
fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.fields.infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
match (a.mutbl, b.mutbl) {
// If one side or both is mut, then the GLB must use
// the precise type from the mut side.
(MutMutable, MutMutable) => {
let t = try!(self.equate().tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutMutable})
}
// If one side or both is immutable, we can use the GLB of
// both sides but mutbl must be `MutImmutable`.
(MutImmutable, MutImmutable) => {
let t = try!(self.tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutImmutable})
}
// There is no mutual subtype of these combinations.
(MutMutable, MutImmutable) |
(MutImmutable, MutMutable) => {
Err(ty::terr_mutability)
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.lub().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(NormalFn, _) | (_, NormalFn) => Ok(NormalFn),
(UnsafeFn, UnsafeFn) => Ok(UnsafeFn)
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Many, _) | (_, Many) => Ok(Many),
(Once, Once) => Ok(Once)
}
}
fn builtin_bounds(&self,
a: ty::BuiltinBounds,
b: ty::BuiltinBounds)
-> cres<ty::BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the GLB (mutual subtype) is the union.
Ok(a.union(b))
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));
Ok(self.fields.infcx.region_vars.glb_regions(Subtype(self.trace()), a, b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.lub().regions(a, b)
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
debug!("{}.fn_sigs({}, {})",
self.tag(), a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
let _indenter = indenter();
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.fields.infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let a_vars = var_ids(self, &a_map);
let (b_with_fresh, b_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
let b_vars = var_ids(self, &b_map);
// Collect constraints.
let sig0 = try!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.fields.infcx.tcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
self.fields.infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.fields.infcx.tcx,
&sig0,
|r| {
generalize_region(self,
mark,
new_vars.as_slice(),
sig0.binder_id,
&a_map,
a_vars.as_slice(),
b_vars.as_slice(),
r)
});
debug!("sig1 = {}", sig1.repr(self.fields.infcx.tcx));
return Ok(sig1);
fn generalize_region(this: &Glb,
mark: RegionMark,
new_vars: &[RegionVid],
new_binder_id: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
a_vars: &[RegionVid],
b_vars: &[RegionVid],
r0: ty::Region) -> ty::Region {
        if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
return r0;
}
let tainted = this.fields.infcx.region_vars.tainted(mark, r0);
let mut a_r = None;
let mut b_r = None;
let mut only_new_vars = true;
for r in tainted.iter() {
if is_var_in_set(a_vars, *r) {
if a_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
a_r = Some(*r);
}
} else if is_var_in_set(b_vars, *r) {
if b_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
b_r = Some(*r);
}
            } else if !is_var_in_set(new_vars, *r) {
only_new_vars = false;
}
}
// NB---I do not believe this algorithm computes
// (necessarily) the GLB. As written it can
// spuriously fail. In particular, if there is a case
// like: |fn(&a)| and fn(fn(&b)), where a and b are
// free, it will return fn(&c) where c = GLB(a,b). If
// however this GLB is not defined, then the result is
// an error, even though something like
// "fn<X>(fn(&X))" where X is bound would be a
// subtype of both of those.
//
// The problem is that if we were to return a bound
// variable, we'd be computing a lower-bound, but not
// necessarily the *greatest* lower-bound.
//
// Unfortunately, this problem is non-trivial to solve,
// because we do not know at the time of computing the GLB
// whether a GLB(a,b) exists or not, because we haven't
// run region inference (or indeed, even fully computed
// the region hierarchy!). The current algorithm seems to
        // work ok in practice.
if a_r.is_some() && b_r.is_some() && only_new_vars {
// Related to exactly one bound variable from each fn:
return rev_lookup(this, a_map, new_binder_id, a_r.unwrap());
} else if a_r.is_none() && b_r.is_none() {
// Not related to bound variables from either fn:
assert!(!r0.is_bound());
return r0;
} else {
// Other:
return fresh_bound_variable(this, new_binder_id);
}
}
fn rev_lookup(this: &Glb,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
new_binder_id: NodeId,
r: ty::Region) -> ty::Region
{
for (a_br, a_r) in a_map.iter() {
if *a_r == r {
return ty::ReLateBound(new_binder_id, *a_br);
}
}
this.fields.infcx.tcx.sess.span_bug(
this.fields.trace.origin.span(),
format!("could not find original bound region for {}",
r).as_slice())
}
fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {
this.fields.infcx.region_vars.new_bound(binder_id)
}
}
}
|
{ self.fields.infcx }
|
identifier_body
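A self-contained, hedged distillation of the mutability table mts() encodes above: mut/mut forces the component types to be equal, imm/imm takes the GLB of the types, and mixed mutability has no mutual subtype at all.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability { Mutable, Immutable }

fn glb_mutbl(a: Mutability, b: Mutability) -> Result<Mutability, &'static str> {
    use self::Mutability::*;
    match (a, b) {
        (Mutable, Mutable) => Ok(Mutable),       // component types must then be equal
        (Immutable, Immutable) => Ok(Immutable), // component types may differ; take their GLB
        _ => Err("no mutual subtype of mixed mutability"),
    }
}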
|
glb.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::equate::Equate;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::region_inference::RegionMark;
use syntax::ast::{Many, Once, MutImmutable, MutMutable};
use syntax::ast::{NormalFn, UnsafeFn, NodeId};
use syntax::ast::{Onceness, FnStyle};
use std::collections::HashMap;
use util::common::{indenter};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
/// "Greatest lower bound" (common subtype)
pub struct Glb<'f, 'tcx: 'f> {
fields: CombineFields<'f, 'tcx>
}
#[allow(non_snake_case)]
pub fn
|
<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Glb<'f, 'tcx> {
Glb { fields: cf }
}
impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "glb".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace { self.fields.trace.clone() }
fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.fields.infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
match (a.mutbl, b.mutbl) {
// If one side or both is mut, then the GLB must use
// the precise type from the mut side.
(MutMutable, MutMutable) => {
let t = try!(self.equate().tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutMutable})
}
// If one side or both is immutable, we can use the GLB of
// both sides but mutbl must be `MutImmutable`.
(MutImmutable, MutImmutable) => {
let t = try!(self.tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutImmutable})
}
// There is no mutual subtype of these combinations.
(MutMutable, MutImmutable) |
(MutImmutable, MutMutable) => {
Err(ty::terr_mutability)
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.lub().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(NormalFn, _) | (_, NormalFn) => Ok(NormalFn),
(UnsafeFn, UnsafeFn) => Ok(UnsafeFn)
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Many, _) | (_, Many) => Ok(Many),
(Once, Once) => Ok(Once)
}
}
fn builtin_bounds(&self,
a: ty::BuiltinBounds,
b: ty::BuiltinBounds)
-> cres<ty::BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the GLB (mutual subtype) is the union.
Ok(a.union(b))
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));
Ok(self.fields.infcx.region_vars.glb_regions(Subtype(self.trace()), a, b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.lub().regions(a, b)
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
debug!("{}.fn_sigs({}, {})",
self.tag(), a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
let _indenter = indenter();
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.fields.infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let a_vars = var_ids(self, &a_map);
let (b_with_fresh, b_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
let b_vars = var_ids(self, &b_map);
// Collect constraints.
let sig0 = try!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.fields.infcx.tcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
self.fields.infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.fields.infcx.tcx,
&sig0,
|r| {
generalize_region(self,
mark,
new_vars.as_slice(),
sig0.binder_id,
&a_map,
a_vars.as_slice(),
b_vars.as_slice(),
r)
});
debug!("sig1 = {}", sig1.repr(self.fields.infcx.tcx));
return Ok(sig1);
fn generalize_region(this: &Glb,
mark: RegionMark,
new_vars: &[RegionVid],
new_binder_id: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
a_vars: &[RegionVid],
b_vars: &[RegionVid],
r0: ty::Region) -> ty::Region {
        if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
return r0;
}
let tainted = this.fields.infcx.region_vars.tainted(mark, r0);
let mut a_r = None;
let mut b_r = None;
let mut only_new_vars = true;
for r in tainted.iter() {
if is_var_in_set(a_vars, *r) {
if a_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
a_r = Some(*r);
}
} else if is_var_in_set(b_vars, *r) {
if b_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
b_r = Some(*r);
}
            } else if !is_var_in_set(new_vars, *r) {
only_new_vars = false;
}
}
// NB---I do not believe this algorithm computes
// (necessarily) the GLB. As written it can
// spuriously fail. In particular, if there is a case
// like: |fn(&a)| and fn(fn(&b)), where a and b are
// free, it will return fn(&c) where c = GLB(a,b). If
// however this GLB is not defined, then the result is
// an error, even though something like
// "fn<X>(fn(&X))" where X is bound would be a
// subtype of both of those.
//
// The problem is that if we were to return a bound
// variable, we'd be computing a lower-bound, but not
// necessarily the *greatest* lower-bound.
//
// Unfortunately, this problem is non-trivial to solve,
// because we do not know at the time of computing the GLB
// whether a GLB(a,b) exists or not, because we haven't
// run region inference (or indeed, even fully computed
// the region hierarchy!). The current algorithm seems to
        // work ok in practice.
if a_r.is_some() && b_r.is_some() && only_new_vars {
// Related to exactly one bound variable from each fn:
return rev_lookup(this, a_map, new_binder_id, a_r.unwrap());
} else if a_r.is_none() && b_r.is_none() {
// Not related to bound variables from either fn:
assert!(!r0.is_bound());
return r0;
} else {
// Other:
return fresh_bound_variable(this, new_binder_id);
}
}
fn rev_lookup(this: &Glb,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
new_binder_id: NodeId,
r: ty::Region) -> ty::Region
{
for (a_br, a_r) in a_map.iter() {
if *a_r == r {
return ty::ReLateBound(new_binder_id, *a_br);
}
}
this.fields.infcx.tcx.sess.span_bug(
this.fields.trace.origin.span(),
format!("could not find original bound region for {}",
r).as_slice())
}
fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {
this.fields.infcx.region_vars.new_bound(binder_id)
}
}
}
|
Glb
|
identifier_name
|
glb.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty::{BuiltinBounds};
use middle::ty::RegionVid;
use middle::ty;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::lattice::*;
use middle::typeck::infer::equate::Equate;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::{TypeTrace, Subtype};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::infer::region_inference::RegionMark;
use syntax::ast::{Many, Once, MutImmutable, MutMutable};
use syntax::ast::{NormalFn, UnsafeFn, NodeId};
use syntax::ast::{Onceness, FnStyle};
use std::collections::HashMap;
use util::common::{indenter};
use util::ppaux::mt_to_string;
use util::ppaux::Repr;
/// "Greatest lower bound" (common subtype)
pub struct Glb<'f, 'tcx: 'f> {
fields: CombineFields<'f, 'tcx>
}
#[allow(non_snake_case)]
pub fn Glb<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Glb<'f, 'tcx> {
Glb { fields: cf }
}
impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "glb".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace { self.fields.trace.clone() }
fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.fields.infcx.tcx;
debug!("{}.mts({}, {})",
self.tag(),
mt_to_string(tcx, a),
mt_to_string(tcx, b));
match (a.mutbl, b.mutbl) {
// If one side or both is mut, then the GLB must use
// the precise type from the mut side.
(MutMutable, MutMutable) => {
let t = try!(self.equate().tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutMutable})
}
// If one side or both is immutable, we can use the GLB of
// both sides but mutbl must be `MutImmutable`.
(MutImmutable, MutImmutable) => {
let t = try!(self.tys(a.ty, b.ty));
Ok(ty::mt {ty: t, mutbl: MutImmutable})
}
// There is no mutual subtype of these combinations.
(MutMutable, MutImmutable) |
(MutImmutable, MutMutable) => {
Err(ty::terr_mutability)
}
}
}
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
self.lub().tys(a, b)
}
fn fn_styles(&self, a: FnStyle, b: FnStyle) -> cres<FnStyle> {
match (a, b) {
(NormalFn, _) | (_, NormalFn) => Ok(NormalFn),
(UnsafeFn, UnsafeFn) => Ok(UnsafeFn)
}
}
fn oncenesses(&self, a: Onceness, b: Onceness) -> cres<Onceness> {
match (a, b) {
(Many, _) | (_, Many) => Ok(Many),
(Once, Once) => Ok(Once)
}
}
fn builtin_bounds(&self,
a: ty::BuiltinBounds,
b: ty::BuiltinBounds)
-> cres<ty::BuiltinBounds> {
// More bounds is a subtype of fewer bounds, so
// the GLB (mutual subtype) is the union.
Ok(a.union(b))
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));
Ok(self.fields.infcx.region_vars.glb_regions(Subtype(self.trace()), a, b))
}
fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<ty::Region> {
self.lub().regions(a, b)
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
super_lattice_tys(self, a, b)
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
debug!("{}.fn_sigs({}, {})",
self.tag(), a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
let _indenter = indenter();
// Make a mark so we can examine "all bindings that were
// created as part of this type comparison".
let mark = self.fields.infcx.region_vars.mark();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), a);
let a_vars = var_ids(self, &a_map);
let (b_with_fresh, b_map) =
self.fields.infcx.replace_late_bound_regions_with_fresh_regions(
self.trace(), b);
let b_vars = var_ids(self, &b_map);
// Collect constraints.
let sig0 = try!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = {}", sig0.repr(self.fields.infcx.tcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
self.fields.infcx.region_vars.vars_created_since_mark(mark);
let sig1 =
fold_regions_in_sig(
self.fields.infcx.tcx,
&sig0,
|r| {
generalize_region(self,
mark,
new_vars.as_slice(),
sig0.binder_id,
&a_map,
a_vars.as_slice(),
b_vars.as_slice(),
r)
});
debug!("sig1 = {}", sig1.repr(self.fields.infcx.tcx));
return Ok(sig1);
fn generalize_region(this: &Glb,
mark: RegionMark,
new_vars: &[RegionVid],
new_binder_id: NodeId,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
a_vars: &[RegionVid],
b_vars: &[RegionVid],
r0: ty::Region) -> ty::Region {
        if !is_var_in_set(new_vars, r0) {
assert!(!r0.is_bound());
return r0;
}
let tainted = this.fields.infcx.region_vars.tainted(mark, r0);
let mut a_r = None;
let mut b_r = None;
let mut only_new_vars = true;
for r in tainted.iter() {
if is_var_in_set(a_vars, *r) {
if a_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
a_r = Some(*r);
}
} else if is_var_in_set(b_vars, *r) {
if b_r.is_some() {
return fresh_bound_variable(this, new_binder_id);
} else {
b_r = Some(*r);
}
            } else if !is_var_in_set(new_vars, *r) {
only_new_vars = false;
}
}
// NB---I do not believe this algorithm computes
// (necessarily) the GLB. As written it can
// spuriously fail. In particular, if there is a case
// like: |fn(&a)| and fn(fn(&b)), where a and b are
// free, it will return fn(&c) where c = GLB(a,b). If
// however this GLB is not defined, then the result is
// an error, even though something like
// "fn<X>(fn(&X))" where X is bound would be a
// subtype of both of those.
//
// The problem is that if we were to return a bound
// variable, we'd be computing a lower-bound, but not
// necessarily the *greatest* lower-bound.
//
// Unfortunately, this problem is non-trivial to solve,
// because we do not know at the time of computing the GLB
// whether a GLB(a,b) exists or not, because we haven't
// run region inference (or indeed, even fully computed
// the region hierarchy!). The current algorithm seems to
        // work ok in practice.
if a_r.is_some() && b_r.is_some() && only_new_vars {
// Related to exactly one bound variable from each fn:
return rev_lookup(this, a_map, new_binder_id, a_r.unwrap());
} else if a_r.is_none() && b_r.is_none() {
// Not related to bound variables from either fn:
assert!(!r0.is_bound());
return r0;
} else {
// Other:
return fresh_bound_variable(this, new_binder_id);
}
}
fn rev_lookup(this: &Glb,
a_map: &HashMap<ty::BoundRegion, ty::Region>,
new_binder_id: NodeId,
r: ty::Region) -> ty::Region
{
for (a_br, a_r) in a_map.iter() {
if *a_r == r {
return ty::ReLateBound(new_binder_id, *a_br);
}
}
this.fields.infcx.tcx.sess.span_bug(
this.fields.trace.origin.span(),
format!("could not find original bound region for {}",
r).as_slice())
}
fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {
|
}
}
}
|
this.fields.infcx.region_vars.new_bound(binder_id)
|
random_line_split
|
str.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! String utils for attributes and similar stuff.
#![deny(missing_docs)]
use num_traits::ToPrimitive;
use std::borrow::Cow;
use std::convert::AsRef;
use std::iter::{Filter, Peekable};
use std::str::Split;
/// A static slice of characters.
pub type StaticCharVec = &'static [char];
/// A static slice of `str`s.
pub type StaticStringVec = &'static [&'static str];
/// A "space character" according to:
///
/// <https://html.spec.whatwg.org/multipage/#space-character>
pub static HTML_SPACE_CHARACTERS: StaticCharVec =
&['\u{0020}', '\u{0009}', '\u{000a}', '\u{000c}', '\u{000d}'];
/// Whether a character is an HTML whitespace character.
#[inline]
pub fn char_is_whitespace(c: char) -> bool {
HTML_SPACE_CHARACTERS.contains(&c)
}
/// Whether the entire string is HTML whitespace.
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s.chars().all(char_is_whitespace)
}
#[inline]
fn not_empty(&split: &&str) -> bool {
!split.is_empty()
}
/// Split a string on HTML whitespace.
#[inline]
pub fn split_html_space_chars<'a>(
s: &'a str,
) -> Filter<Split<'a, StaticCharVec>, fn(&&str) -> bool> {
s.split(HTML_SPACE_CHARACTERS)
.filter(not_empty as fn(&&str) -> bool)
}
/// Split a string on commas.
#[inline]
pub fn split_commas<'a>(s: &'a str) -> Filter<Split<'a, char>, fn(&&str) -> bool> {
s.split(',').filter(not_empty as fn(&&str) -> bool)
}
/// Whether a character is an ASCII digit.
pub fn is_ascii_digit(c: &char) -> bool {
match *c {
'0'..='9' => true,
_ => false,
}
}
fn is_decimal_point(c: char) -> bool {
c == '.'
}
fn is_exponent_char(c: char) -> bool {
match c {
'e' | 'E' => true,
_ => false,
}
}
/// Read a set of ASCII digits and parse them into a number.
pub fn read_numbers<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> (Option<i64>, usize) {
match iter.peek() {
Some(c) if is_ascii_digit(c) => (),
_ => return (None, 0),
}
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((Some(0i64), 0), |accumulator, d| {
let digits = accumulator
.0
.and_then(|accumulator| accumulator.checked_mul(10))
.and_then(|accumulator| accumulator.checked_add(d));
(digits, accumulator.1 + 1)
})
}
/// Read a decimal fraction.
pub fn read_fraction<I: Iterator<Item = char>>(
mut iter: Peekable<I>,
mut divisor: f64,
value: f64,
) -> (f64, usize) {
match iter.peek() {
Some(c) if is_decimal_point(*c) => (),
_ => return (value, 0),
}
iter.next();
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((value, 1), |accumulator, d| {
divisor *= 10f64;
(accumulator.0 + d as f64 / divisor, accumulator.1 + 1)
})
}
/// Reads an exponent from an iterator over chars, for example `e100`.
pub fn read_exponent<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> Option<i32> {
match iter.peek() {
Some(c) if is_exponent_char(*c) => (),
_ => return None,
}
iter.next();
match iter.peek() {
None => None,
Some(&'-') => {
iter.next();
read_numbers(iter).0.map(|exp| -exp.to_i32().unwrap_or(0))
},
Some(&'+') => {
iter.next();
read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0))
},
Some(_) => read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0)),
}
}
/// Join a set of strings with a given delimiter `join`.
pub fn str_join<I, T>(strs: I, join: &str) -> String
where
I: IntoIterator<Item = T>,
T: AsRef<str>,
{
strs.into_iter()
.enumerate()
.fold(String::new(), |mut acc, (i, s)| {
if i > 0 {
acc.push_str(join);
}
acc.push_str(s.as_ref());
acc
})
}
/// Returns true if a given string has a given prefix with case-insensitive match.
pub fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
string.len() >= prefix.len() &&
string.as_bytes()[0..prefix.len()].eq_ignore_ascii_case(prefix.as_bytes())
}
/// Returns an ascii lowercase version of a string, only allocating if needed.
pub fn
|
<'a>(input: &'a str) -> Cow<'a, str> {
if input.bytes().any(|c| matches!(c, b'A'..=b'Z')) {
input.to_ascii_lowercase().into()
} else {
// Already ascii lowercase.
Cow::Borrowed(input)
}
}
/// To avoid accidentally instantiating multiple monomorphizations of large
/// serialization routines, we define explicit concrete types and require
/// them in those routines. This avoids accidental mixing of String and
/// nsACString arguments in Gecko, which would cause code size to blow up.
#[cfg(feature = "gecko")]
pub type CssStringWriter = ::nsstring::nsACString;
/// String type that coerces to CssStringWriter, used when serialization code
/// needs to allocate a temporary string.
#[cfg(feature = "gecko")]
pub type CssString = ::nsstring::nsCString;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssStringWriter = String;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssString = String;
|
string_as_ascii_lowercase
|
identifier_name
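A hedged usage sketch chaining the three readers above to parse "3.25e2", assuming it lives in the same module; the digit counts each reader returns drive the slicing (note read_fraction's count includes the decimal point, since its fold starts at 1).

fn parse_demo() {
    let s = "3.25e2";
    let (int_part, int_digits) = read_numbers(s.chars().peekable());
    assert_eq!(int_part, Some(3));
    // read_fraction wants the iterator positioned at the decimal point,
    // plus the running divisor (1.0) and the value accumulated so far.
    let value = int_part.unwrap() as f64;
    let (value, frac_digits) = read_fraction(s[int_digits..].chars().peekable(), 1.0, value);
    assert!((value - 3.25).abs() < 1e-9);
    // read_exponent consumes the 'e'/'E' and an optional sign.
    let exp = read_exponent(s[int_digits + frac_digits..].chars().peekable());
    assert_eq!(exp, Some(2));
}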
|
str.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! String utils for attributes and similar stuff.
#![deny(missing_docs)]
use num_traits::ToPrimitive;
use std::borrow::Cow;
use std::convert::AsRef;
use std::iter::{Filter, Peekable};
use std::str::Split;
/// A static slice of characters.
pub type StaticCharVec = &'static [char];
/// A static slice of `str`s.
pub type StaticStringVec = &'static [&'static str];
/// A "space character" according to:
///
/// <https://html.spec.whatwg.org/multipage/#space-character>
pub static HTML_SPACE_CHARACTERS: StaticCharVec =
&['\u{0020}', '\u{0009}', '\u{000a}', '\u{000c}', '\u{000d}'];
/// Whether a character is an HTML whitespace character.
#[inline]
pub fn char_is_whitespace(c: char) -> bool {
HTML_SPACE_CHARACTERS.contains(&c)
}
/// Whether the entire string is HTML whitespace.
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s.chars().all(char_is_whitespace)
}
#[inline]
fn not_empty(&split: &&str) -> bool {
!split.is_empty()
}
/// Split a string on HTML whitespace.
#[inline]
pub fn split_html_space_chars<'a>(
s: &'a str,
) -> Filter<Split<'a, StaticCharVec>, fn(&&str) -> bool> {
s.split(HTML_SPACE_CHARACTERS)
.filter(not_empty as fn(&&str) -> bool)
}
/// Split a string on commas.
#[inline]
pub fn split_commas<'a>(s: &'a str) -> Filter<Split<'a, char>, fn(&&str) -> bool> {
s.split(',').filter(not_empty as fn(&&str) -> bool)
}
/// Whether a character is an ASCII digit.
pub fn is_ascii_digit(c: &char) -> bool {
match *c {
'0'..='9' => true,
_ => false,
}
}
fn is_decimal_point(c: char) -> bool {
c == '.'
}
fn is_exponent_char(c: char) -> bool {
match c {
'e' | 'E' => true,
_ => false,
}
}
/// Read a set of ASCII digits and parse them into a number.
pub fn read_numbers<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> (Option<i64>, usize)
|
/// Read a decimal fraction.
pub fn read_fraction<I: Iterator<Item = char>>(
mut iter: Peekable<I>,
mut divisor: f64,
value: f64,
) -> (f64, usize) {
match iter.peek() {
Some(c) if is_decimal_point(*c) => (),
_ => return (value, 0),
}
iter.next();
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((value, 1), |accumulator, d| {
divisor *= 10f64;
(accumulator.0 + d as f64 / divisor, accumulator.1 + 1)
})
}
/// Reads an exponent from an iterator over chars, for example `e100`.
pub fn read_exponent<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> Option<i32> {
match iter.peek() {
Some(c) if is_exponent_char(*c) => (),
_ => return None,
}
iter.next();
match iter.peek() {
None => None,
Some(&'-') => {
iter.next();
read_numbers(iter).0.map(|exp| -exp.to_i32().unwrap_or(0))
},
Some(&'+') => {
iter.next();
read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0))
},
Some(_) => read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0)),
}
}
/// Join a set of strings with a given delimiter `join`.
pub fn str_join<I, T>(strs: I, join: &str) -> String
where
I: IntoIterator<Item = T>,
T: AsRef<str>,
{
strs.into_iter()
.enumerate()
.fold(String::new(), |mut acc, (i, s)| {
if i > 0 {
acc.push_str(join);
}
acc.push_str(s.as_ref());
acc
})
}
/// Returns true if a given string has a given prefix with case-insensitive match.
pub fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
string.len() >= prefix.len() &&
string.as_bytes()[0..prefix.len()].eq_ignore_ascii_case(prefix.as_bytes())
}
/// Returns an ascii lowercase version of a string, only allocating if needed.
pub fn string_as_ascii_lowercase<'a>(input: &'a str) -> Cow<'a, str> {
if input.bytes().any(|c| matches!(c, b'A'..=b'Z')) {
input.to_ascii_lowercase().into()
} else {
// Already ascii lowercase.
Cow::Borrowed(input)
}
}
/// To avoid accidentally instantiating multiple monomorphizations of large
/// serialization routines, we define explicit concrete types and require
/// them in those routines. This avoids accidental mixing of String and
/// nsACString arguments in Gecko, which would cause code size to blow up.
#[cfg(feature = "gecko")]
pub type CssStringWriter = ::nsstring::nsACString;
/// String type that coerces to CssStringWriter, used when serialization code
/// needs to allocate a temporary string.
#[cfg(feature = "gecko")]
pub type CssString = ::nsstring::nsCString;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssStringWriter = String;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssString = String;
|
{
match iter.peek() {
Some(c) if is_ascii_digit(c) => (),
_ => return (None, 0),
}
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((Some(0i64), 0), |accumulator, d| {
let digits = accumulator
.0
.and_then(|accumulator| accumulator.checked_mul(10))
.and_then(|accumulator| accumulator.checked_add(d));
(digits, accumulator.1 + 1)
})
}
|
identifier_body
|
str.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! String utils for attributes and similar stuff.
#![deny(missing_docs)]
use num_traits::ToPrimitive;
use std::borrow::Cow;
use std::convert::AsRef;
use std::iter::{Filter, Peekable};
use std::str::Split;
/// A static slice of characters.
pub type StaticCharVec = &'static [char];
/// A static slice of `str`s.
pub type StaticStringVec = &'static [&'static str];
/// A "space character" according to:
///
/// <https://html.spec.whatwg.org/multipage/#space-character>
pub static HTML_SPACE_CHARACTERS: StaticCharVec =
&['\u{0020}', '\u{0009}', '\u{000a}', '\u{000c}', '\u{000d}'];
/// Whether a character is an HTML whitespace character.
#[inline]
pub fn char_is_whitespace(c: char) -> bool {
HTML_SPACE_CHARACTERS.contains(&c)
}
/// Whether the entire string is HTML whitespace.
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s.chars().all(char_is_whitespace)
}
#[inline]
fn not_empty(&split: &&str) -> bool {
!split.is_empty()
}
/// Split a string on HTML whitespace.
#[inline]
pub fn split_html_space_chars<'a>(
s: &'a str,
|
) -> Filter<Split<'a, StaticCharVec>, fn(&&str) -> bool> {
s.split(HTML_SPACE_CHARACTERS)
.filter(not_empty as fn(&&str) -> bool)
}
/// Split a string on commas.
#[inline]
pub fn split_commas<'a>(s: &'a str) -> Filter<Split<'a, char>, fn(&&str) -> bool> {
s.split(',').filter(not_empty as fn(&&str) -> bool)
}
/// Whether a character is an ASCII digit.
pub fn is_ascii_digit(c: &char) -> bool {
match *c {
'0'..='9' => true,
_ => false,
}
}
fn is_decimal_point(c: char) -> bool {
c == '.'
}
fn is_exponent_char(c: char) -> bool {
match c {
'e' | 'E' => true,
_ => false,
}
}
/// Read a set of ASCII digits and parse them into a number.
pub fn read_numbers<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> (Option<i64>, usize) {
match iter.peek() {
Some(c) if is_ascii_digit(c) => (),
_ => return (None, 0),
}
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((Some(0i64), 0), |accumulator, d| {
let digits = accumulator
.0
.and_then(|accumulator| accumulator.checked_mul(10))
.and_then(|accumulator| accumulator.checked_add(d));
(digits, accumulator.1 + 1)
})
}
/// Read a decimal fraction.
pub fn read_fraction<I: Iterator<Item = char>>(
mut iter: Peekable<I>,
mut divisor: f64,
value: f64,
) -> (f64, usize) {
match iter.peek() {
Some(c) if is_decimal_point(*c) => (),
_ => return (value, 0),
}
iter.next();
iter.take_while(is_ascii_digit)
.map(|d| d as i64 - '0' as i64)
.fold((value, 1), |accumulator, d| {
divisor *= 10f64;
(accumulator.0 + d as f64 / divisor, accumulator.1 + 1)
})
}
/// Reads an exponent from an iterator over chars, for example `e100`.
pub fn read_exponent<I: Iterator<Item = char>>(mut iter: Peekable<I>) -> Option<i32> {
match iter.peek() {
Some(c) if is_exponent_char(*c) => (),
_ => return None,
}
iter.next();
match iter.peek() {
None => None,
Some(&'-') => {
iter.next();
read_numbers(iter).0.map(|exp| -exp.to_i32().unwrap_or(0))
},
Some(&'+') => {
iter.next();
read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0))
},
Some(_) => read_numbers(iter).0.map(|exp| exp.to_i32().unwrap_or(0)),
}
}
/// Join a set of strings with a given delimiter `join`.
pub fn str_join<I, T>(strs: I, join: &str) -> String
where
I: IntoIterator<Item = T>,
T: AsRef<str>,
{
strs.into_iter()
.enumerate()
.fold(String::new(), |mut acc, (i, s)| {
if i > 0 {
acc.push_str(join);
}
acc.push_str(s.as_ref());
acc
})
}
/// Returns true if a given string has a given prefix with case-insensitive match.
pub fn starts_with_ignore_ascii_case(string: &str, prefix: &str) -> bool {
string.len() >= prefix.len() &&
string.as_bytes()[0..prefix.len()].eq_ignore_ascii_case(prefix.as_bytes())
}
/// Returns an ascii lowercase version of a string, only allocating if needed.
pub fn string_as_ascii_lowercase<'a>(input: &'a str) -> Cow<'a, str> {
if input.bytes().any(|c| matches!(c, b'A'..=b'Z')) {
input.to_ascii_lowercase().into()
} else {
// Already ascii lowercase.
Cow::Borrowed(input)
}
}
/// To avoid accidentally instantiating multiple monomorphizations of large
/// serialization routines, we define explicit concrete types and require
/// them in those routines. This avoids accidental mixing of String and
/// nsACString arguments in Gecko, which would cause code size to blow up.
#[cfg(feature = "gecko")]
pub type CssStringWriter = ::nsstring::nsACString;
/// String type that coerces to CssStringWriter, used when serialization code
/// needs to allocate a temporary string.
#[cfg(feature = "gecko")]
pub type CssString = ::nsstring::nsCString;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssStringWriter = String;
/// String. The comments for the Gecko types explain the need for this abstraction.
#[cfg(feature = "servo")]
pub type CssString = String;
|
random_line_split
|
|
lib.rs
|
#![feature(plugin)]
#![plugin(interpolate_idents)]
#[macro_use]
extern crate apache2;
use apache2::{Request, Status, server_banner, server_description, server_built, show_mpm,
apr_version_string, apu_version_string, Cookie, time_now};
apache2_module!(info_rs, b"mod_info_rs\0");
fn unwrap_str<'a>(wrapped: Option<&'a str>) -> &'a str {
wrapped.unwrap_or("--")
}
fn info_rs_handler(r: &mut Request) -> Result<Status, ()> {
    if get!(r.handler()) != "server-info-rs" {
return Ok(Status::DECLINED)
}
r.set_content_type("text/html");
r.set_last_modified(time_now());
try!(r.write("<!doctype html><html><head><meta charset=\"utf-8\"><title>Apache Info</title></head><body>"));
try!(r.write("<h1>Apache Server Information</h1>"));
let server_name = try!(
r.escape_html(
unwrap_str(r.server_name())
)
);
let server_port = r.server_port();
let local_ip = unwrap_str(get!(r.connection()).local_ip());
try!(r.write(format!("<p>Server: {}:{} (via {})</p>", server_name, server_port, local_ip)));
let description = unwrap_str(server_description());
let banner = unwrap_str(server_banner());
try!(r.write(format!("<p>Server Description/Banner: {} / {}</p>", description, banner)));
    let mpm = unwrap_str(show_mpm());
    try!(r.write(format!("<p>Server MPM: {}</p>", mpm)));
let built = unwrap_str(server_built());
try!(r.write(format!("<p>Server Built: {}</p>", built)));
let apr_version = unwrap_str(apr_version_string());
try!(r.write(format!("<p>Server loaded APR Version: {}</p>", apr_version)));
let apu_version = unwrap_str(apu_version_string());
try!(r.write(format!("<p>Server loaded APU Version: {}</p>", apu_version)));
let document_root = unwrap_str(r.document_root());
try!(r.write(format!("<p>Document Root: {}</p>", document_root)));
try!(r.write("<hr />"));
try!(r.write("<h2>Current Request Information</h2>"));
let client_ip = unwrap_str(get!(r.connection()).client_ip());
try!(r.write(format!("<p>Client IP: {}</p>", client_ip)));
let useragent_ip = unwrap_str(r.useragent_ip());
try!(r.write(format!("<p>Useragent IP: {}</p>", useragent_ip)));
let hostname = unwrap_str(r.hostname());
try!(r.write(format!("<p>Hostname: {}</p>", hostname)));
let the_request = unwrap_str(r.the_request());
try!(r.write(format!("<p>Request: {}</p>", the_request)));
let protocol = unwrap_str(r.protocol());
try!(r.write(format!("<p>Protocol: {}</p>", protocol)));
let http_scheme = unwrap_str(r.http_scheme());
try!(r.write(format!("<p>HTTP Scheme: {}</p>", http_scheme)));
try!(r.write(format!("<p>HTTP/0.9: {:?}</p>", r.assbackwards())));
let method = unwrap_str(r.method());
try!(r.write(format!("<p>Method: {}</p>", method)));
let unparsed_uri = unwrap_str(r.unparsed_uri());
try!(r.write(format!("<p>Unparsed URI: {}</p>", unparsed_uri)));
let uri = unwrap_str(r.uri());
try!(r.write(format!("<p>URI: {}</p>", uri)));
let args = unwrap_str(r.args());
try!(r.write(format!("<p>Request Args: {}</p>", args)));
let content_type = unwrap_str(r.content_type());
try!(r.write(format!("<p>Content Type: {}</p>", content_type)));
let content_encoding = unwrap_str(r.content_encoding());
try!(r.write(format!("<p>Content Encoding: {}</p>", content_encoding)));
try!(r.write(format!("<p>Content Length: {}</p>", r.clength())));
try!(r.write(format!("<p>Is Initial Request: {}</p>", r.is_initial_req())));
let context_document_root = unwrap_str(r.context_document_root());
try!(r.write(format!("<p>Context Document Root: {}</p>", context_document_root)));
let context_prefix = unwrap_str(r.context_prefix());
try!(r.write(format!("<p>Context Prefix: {}</p>", context_prefix)));
let range = unwrap_str(r.range());
try!(r.write(format!("<p>Range: {}</p>", range)));
let handler = unwrap_str(r.handler());
try!(r.write(format!("<p>Handler: {}</p>", handler)));
let path_info = unwrap_str(r.path_info());
try!(r.write(format!("<p>Path Info: {}</p>", path_info)));
let filename = unwrap_str(r.filename());
try!(r.write(format!("<p>Filename: {}</p>", filename)));
let canonical_filename = unwrap_str(r.canonical_filename());
try!(r.write(format!("<p>Canonical Filename: {}</p>", canonical_filename)));
let request_time = try!(r.rfc822_date(r.request_time()));
try!(r.write(format!("<p>Request Time: {} / {}</p>", request_time, r.request_time())));
let mtime = try!(r.rfc822_date(r.mtime()));
try!(r.write(format!("<p>Last modified time: {} / {}</p>", mtime, r.mtime())));
let log_id = unwrap_str(r.log_id());
try!(r.write(format!("<p>Log ID: {}</p>", log_id)));
let user = unwrap_str(r.user());
try!(r.write(format!("<p>User: {}</p>", user)));
try!(r.write(format!("<p>Some Auth Required: {}</p>", r.some_auth_required())));
let ap_auth_type = unwrap_str(r.ap_auth_type());
try!(r.write(format!("<p>Auth Type: {}</p>", ap_auth_type)));
let auth_name = unwrap_str(r.auth_name());
try!(r.write(format!("<p>Auth Name: {}</p>", auth_name)));
let basic_auth_pw = unwrap_str(r.basic_auth_pw());
try!(r.write(format!("<p>Basic Auth PW: {}</p>", basic_auth_pw)));
try!(r.write(format!("<p>Default Port: {}</p>", r.default_port())));
try!(r.write(format!("<p>ProxyReq: {}</p>", r.proxyreq())));
let key = "sample_cookie";
let val = "info_rs";
match r.cookie(key) {
None => {
let mut cookie = Cookie::new(key, val);
cookie.expires = Some(time_now() + 1000000 * 30);
r.set_cookie(cookie);
try!(r.write(format!("<p>New Cookie – {}: {}</p>", key, val)));
},
Some(stored) => {
try!(r.write(format!("<p>Cookie – {}: {}</p>", key, stored)));
}
};
try!(r.write("<h3>Request Headers</h3>"));
let headers_in = get!(r.headers_in());
for (key, val) in headers_in.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Headers Out</h3>"));
let headers_out = get!(r.headers_out());
for (key, val) in headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
|
for (key, val) in err_headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Notes</h3>"));
let notes = get!(r.notes());
for (key, val) in notes.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Subprocess Environment</h3>"));
let subprocess_env = get!(r.subprocess_env());
for (key, val) in subprocess_env.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Request API check</h3>"));
let original = "Բարեւ, Héébee, გამარჯობა, Witôjze, Здраво, Ciao";
let encoded = try!(r.base64_encode(original));
let plain = try!(r.base64_decode(encoded));
try!(r.write(format!("<p>Original Text: {}</p>", original)));
try!(r.write(format!("<p>Base64 Encoded: {}</p>", encoded)));
try!(r.write(format!("<p>Base64 Decoded: {}</p>", plain)));
let original_url = "http://foo.bar/1 2 3 & 4 + 5";
let encoded_url = try!(r.escape_urlencoded(original_url));
let plain_url = try!(r.unescape_urlencoded(encoded_url));
try!(r.write(format!("<p>Original URL: {}</p>", original_url)));
try!(r.write(format!("<p>Encoded URL: {}</p>", encoded_url)));
try!(r.write(format!("<p>Decoded URL: {}</p>", plain_url)));
let date = try!(r.rfc822_date(0));
try!(r.write(format!("<p>RFC 822 Date: {}</p>", date)));
try!(r.write("</body></html>"));
Ok(Status::OK)
}
|
try!(r.write("<h3>Err Headers Out</h3>"));
let err_headers_out = get!(r.err_headers_out());
|
random_line_split
|
lib.rs
|
#![feature(plugin)]
#![plugin(interpolate_idents)]
#[macro_use]
extern crate apache2;
use apache2::{Request, Status, server_banner, server_description, server_built, show_mpm,
apr_version_string, apu_version_string, Cookie, time_now};
apache2_module!(info_rs, b"mod_info_rs\0");
fn unwrap_str<'a>(wrapped: Option<&'a str>) -> &'a str {
wrapped.unwrap_or("--")
}
fn info_rs_handler(r: &mut Request) -> Result<Status, ()> {
if get!(r.handler()) != "server-info-rs"
|
r.set_content_type("text/html");
r.set_last_modified(time_now());
try!(r.write("<!doctype html><html><head><meta charset=\"utf-8\"><title>Apache Info</title></head><body>"));
try!(r.write("<h1>Apache Server Information</h1>"));
let server_name = try!(
r.escape_html(
unwrap_str(r.server_name())
)
);
let server_port = r.server_port();
let local_ip = unwrap_str(get!(r.connection()).local_ip());
try!(r.write(format!("<p>Server: {}:{} (via {})</p>", server_name, server_port, local_ip)));
let description = unwrap_str(server_description());
let banner = unwrap_str(server_banner());
try!(r.write(format!("<p>Server Description/Banner: {} / {}</p>", description, banner)));
let mpm = unwrap_str(show_mpm());
try!(r.write(format!("<p>Server MPM: {}</p>", mpm)));
let built = unwrap_str(server_built());
try!(r.write(format!("<p>Server Built: {}</p>", built)));
let apr_version = unwrap_str(apr_version_string());
try!(r.write(format!("<p>Server loaded APR Version: {}</p>", apr_version)));
let apu_version = unwrap_str(apu_version_string());
try!(r.write(format!("<p>Server loaded APU Version: {}</p>", apu_version)));
let document_root = unwrap_str(r.document_root());
try!(r.write(format!("<p>Document Root: {}</p>", document_root)));
try!(r.write("<hr />"));
try!(r.write("<h2>Current Request Information</h2>"));
let client_ip = unwrap_str(get!(r.connection()).client_ip());
try!(r.write(format!("<p>Client IP: {}</p>", client_ip)));
let useragent_ip = unwrap_str(r.useragent_ip());
try!(r.write(format!("<p>Useragent IP: {}</p>", useragent_ip)));
let hostname = unwrap_str(r.hostname());
try!(r.write(format!("<p>Hostname: {}</p>", hostname)));
let the_request = unwrap_str(r.the_request());
try!(r.write(format!("<p>Request: {}</p>", the_request)));
let protocol = unwrap_str(r.protocol());
try!(r.write(format!("<p>Protocol: {}</p>", protocol)));
let http_scheme = unwrap_str(r.http_scheme());
try!(r.write(format!("<p>HTTP Scheme: {}</p>", http_scheme)));
try!(r.write(format!("<p>HTTP/0.9: {:?}</p>", r.assbackwards())));
let method = unwrap_str(r.method());
try!(r.write(format!("<p>Method: {}</p>", method)));
let unparsed_uri = unwrap_str(r.unparsed_uri());
try!(r.write(format!("<p>Unparsed URI: {}</p>", unparsed_uri)));
let uri = unwrap_str(r.uri());
try!(r.write(format!("<p>URI: {}</p>", uri)));
let args = unwrap_str(r.args());
try!(r.write(format!("<p>Request Args: {}</p>", args)));
let content_type = unwrap_str(r.content_type());
try!(r.write(format!("<p>Content Type: {}</p>", content_type)));
let content_encoding = unwrap_str(r.content_encoding());
try!(r.write(format!("<p>Content Encoding: {}</p>", content_encoding)));
try!(r.write(format!("<p>Content Length: {}</p>", r.clength())));
try!(r.write(format!("<p>Is Initial Request: {}</p>", r.is_initial_req())));
let context_document_root = unwrap_str(r.context_document_root());
try!(r.write(format!("<p>Context Document Root: {}</p>", context_document_root)));
let context_prefix = unwrap_str(r.context_prefix());
try!(r.write(format!("<p>Context Prefix: {}</p>", context_prefix)));
let range = unwrap_str(r.range());
try!(r.write(format!("<p>Range: {}</p>", range)));
let handler = unwrap_str(r.handler());
try!(r.write(format!("<p>Handler: {}</p>", handler)));
let path_info = unwrap_str(r.path_info());
try!(r.write(format!("<p>Path Info: {}</p>", path_info)));
let filename = unwrap_str(r.filename());
try!(r.write(format!("<p>Filename: {}</p>", filename)));
let canonical_filename = unwrap_str(r.canonical_filename());
try!(r.write(format!("<p>Canonical Filename: {}</p>", canonical_filename)));
let request_time = try!(r.rfc822_date(r.request_time()));
try!(r.write(format!("<p>Request Time: {} / {}</p>", request_time, r.request_time())));
let mtime = try!(r.rfc822_date(r.mtime()));
try!(r.write(format!("<p>Last modified time: {} / {}</p>", mtime, r.mtime())));
let log_id = unwrap_str(r.log_id());
try!(r.write(format!("<p>Log ID: {}</p>", log_id)));
let user = unwrap_str(r.user());
try!(r.write(format!("<p>User: {}</p>", user)));
try!(r.write(format!("<p>Some Auth Required: {}</p>", r.some_auth_required())));
let ap_auth_type = unwrap_str(r.ap_auth_type());
try!(r.write(format!("<p>Auth Type: {}</p>", ap_auth_type)));
let auth_name = unwrap_str(r.auth_name());
try!(r.write(format!("<p>Auth Name: {}</p>", auth_name)));
let basic_auth_pw = unwrap_str(r.basic_auth_pw());
try!(r.write(format!("<p>Basic Auth PW: {}</p>", basic_auth_pw)));
try!(r.write(format!("<p>Default Port: {}</p>", r.default_port())));
try!(r.write(format!("<p>ProxyReq: {}</p>", r.proxyreq())));
let key = "sample_cookie";
let val = "info_rs";
match r.cookie(key) {
None => {
let mut cookie = Cookie::new(key, val);
cookie.expires = Some(time_now() + 1000000 * 30);
r.set_cookie(cookie);
try!(r.write(format!("<p>New Cookie – {}: {}</p>", key, val)));
},
Some(stored) => {
try!(r.write(format!("<p>Cookie – {}: {}</p>", key, stored)));
}
};
try!(r.write("<h3>Request Headers</h3>"));
let headers_in = get!(r.headers_in());
for (key, val) in headers_in.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Headers Out</h3>"));
let headers_out = get!(r.headers_out());
for (key, val) in headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Err Headers Out</h3>"));
let err_headers_out = get!(r.err_headers_out());
for (key, val) in err_headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Notes</h3>"));
let notes = get!(r.notes());
for (key, val) in notes.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Subprocess Environment</h3>"));
let subprocess_env = get!(r.subprocess_env());
for (key, val) in subprocess_env.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Request API check</h3>"));
let original = "Բարեւ, Héébee, გამარჯობა, Witôjze, Здраво, Ciao";
let encoded = try!(r.base64_encode(original));
let plain = try!(r.base64_decode(encoded));
try!(r.write(format!("<p>Original Text: {}</p>", original)));
try!(r.write(format!("<p>Base64 Encoded: {}</p>", encoded)));
try!(r.write(format!("<p>Base64 Decoded: {}</p>", plain)));
let original_url = "http://foo.bar/1 2 3 & 4 + 5";
let encoded_url = try!(r.escape_urlencoded(original_url));
let plain_url = try!(r.unescape_urlencoded(encoded_url));
try!(r.write(format!("<p>Original URL: {}</p>", original_url)));
try!(r.write(format!("<p>Encoded URL: {}</p>", encoded_url)));
try!(r.write(format!("<p>Decoded URL: {}</p>", plain_url)));
let date = try!(r.rfc822_date(0));
try!(r.write(format!("<p>RFC 822 Date: {}</p>", date)));
try!(r.write("</body></html>"));
Ok(Status::OK)
}
|
{
return Ok(Status::DECLINED)
}
|
conditional_block
|
lib.rs
|
#![feature(plugin)]
#![plugin(interpolate_idents)]
#[macro_use]
extern crate apache2;
use apache2::{Request, Status, server_banner, server_description, server_built, show_mpm,
apr_version_string, apu_version_string, Cookie, time_now};
apache2_module!(info_rs, b"mod_info_rs\0");
fn
|
<'a>(wrapped: Option<&'a str>) -> &'a str {
wrapped.unwrap_or("--")
}
fn info_rs_handler(r: &mut Request) -> Result<Status, ()> {
if get!(r.handler()) != "server-info-rs" {
return Ok(Status::DECLINED)
}
r.set_content_type("text/html");
r.set_last_modified(time_now());
try!(r.write("<!doctype html><html><head><meta charset=\"utf-8\"><title>Apache Info</title></head><body>"));
try!(r.write("<h1>Apache Server Information</h1>"));
let server_name = try!(
r.escape_html(
unwrap_str(r.server_name())
)
);
let server_port = r.server_port();
let local_ip = unwrap_str(get!(r.connection()).local_ip());
try!(r.write(format!("<p>Server: {}:{} (via {})</p>", server_name, server_port, local_ip)));
let description = unwrap_str(server_description());
let banner = unwrap_str(server_banner());
try!(r.write(format!("<p>Server Description/Banner: {} / {}</p>", description, banner)));
let mpm = unwrap_str(show_mpm());
try!(r.write(format!("<p>Server MPM: {}</p>", mpm)));
let built = unwrap_str(server_built());
try!(r.write(format!("<p>Server Built: {}</p>", built)));
let apr_version = unwrap_str(apr_version_string());
try!(r.write(format!("<p>Server loaded APR Version: {}</p>", apr_version)));
let apu_version = unwrap_str(apu_version_string());
try!(r.write(format!("<p>Server loaded APU Version: {}</p>", apu_version)));
let document_root = unwrap_str(r.document_root());
try!(r.write(format!("<p>Document Root: {}</p>", document_root)));
try!(r.write("<hr />"));
try!(r.write("<h2>Current Request Information</h2>"));
let client_ip = unwrap_str(get!(r.connection()).client_ip());
try!(r.write(format!("<p>Client IP: {}</p>", client_ip)));
let useragent_ip = unwrap_str(r.useragent_ip());
try!(r.write(format!("<p>Useragent IP: {}</p>", useragent_ip)));
let hostname = unwrap_str(r.hostname());
try!(r.write(format!("<p>Hostname: {}</p>", hostname)));
let the_request = unwrap_str(r.the_request());
try!(r.write(format!("<p>Request: {}</p>", the_request)));
let protocol = unwrap_str(r.protocol());
try!(r.write(format!("<p>Protocol: {}</p>", protocol)));
let http_scheme = unwrap_str(r.http_scheme());
try!(r.write(format!("<p>HTTP Scheme: {}</p>", http_scheme)));
try!(r.write(format!("<p>HTTP/0.9: {:?}</p>", r.assbackwards())));
let method = unwrap_str(r.method());
try!(r.write(format!("<p>Method: {}</p>", method)));
let unparsed_uri = unwrap_str(r.unparsed_uri());
try!(r.write(format!("<p>Unparsed URI: {}</p>", unparsed_uri)));
let uri = unwrap_str(r.uri());
try!(r.write(format!("<p>URI: {}</p>", uri)));
let args = unwrap_str(r.args());
try!(r.write(format!("<p>Request Args: {}</p>", args)));
let content_type = unwrap_str(r.content_type());
try!(r.write(format!("<p>Content Type: {}</p>", content_type)));
let content_encoding = unwrap_str(r.content_encoding());
try!(r.write(format!("<p>Content Encoding: {}</p>", content_encoding)));
try!(r.write(format!("<p>Content Length: {}</p>", r.clength())));
try!(r.write(format!("<p>Is Initial Request: {}</p>", r.is_initial_req())));
let context_document_root = unwrap_str(r.context_document_root());
try!(r.write(format!("<p>Context Document Root: {}</p>", context_document_root)));
let context_prefix = unwrap_str(r.context_prefix());
try!(r.write(format!("<p>Context Prefix: {}</p>", context_prefix)));
let range = unwrap_str(r.range());
try!(r.write(format!("<p>Range: {}</p>", range)));
let handler = unwrap_str(r.handler());
try!(r.write(format!("<p>Handler: {}</p>", handler)));
let path_info = unwrap_str(r.path_info());
try!(r.write(format!("<p>Path Info: {}</p>", path_info)));
let filename = unwrap_str(r.filename());
try!(r.write(format!("<p>Filename: {}</p>", filename)));
let canonical_filename = unwrap_str(r.canonical_filename());
try!(r.write(format!("<p>Canonical Filename: {}</p>", canonical_filename)));
let request_time = try!(r.rfc822_date(r.request_time()));
try!(r.write(format!("<p>Request Time: {} / {}</p>", request_time, r.request_time())));
let mtime = try!(r.rfc822_date(r.mtime()));
try!(r.write(format!("<p>Last modified time: {} / {}</p>", mtime, r.mtime())));
let log_id = unwrap_str(r.log_id());
try!(r.write(format!("<p>Log ID: {}</p>", log_id)));
let user = unwrap_str(r.user());
try!(r.write(format!("<p>User: {}</p>", user)));
try!(r.write(format!("<p>Some Auth Required: {}</p>", r.some_auth_required())));
let ap_auth_type = unwrap_str(r.ap_auth_type());
try!(r.write(format!("<p>Auth Type: {}</p>", ap_auth_type)));
let auth_name = unwrap_str(r.auth_name());
try!(r.write(format!("<p>Auth Name: {}</p>", auth_name)));
let basic_auth_pw = unwrap_str(r.basic_auth_pw());
try!(r.write(format!("<p>Basic Auth PW: {}</p>", basic_auth_pw)));
try!(r.write(format!("<p>Default Port: {}</p>", r.default_port())));
try!(r.write(format!("<p>ProxyReq: {}</p>", r.proxyreq())));
let key = "sample_cookie";
let val = "info_rs";
match r.cookie(key) {
None => {
let mut cookie = Cookie::new(key, val);
cookie.expires = Some(time_now() + 1000000 * 30);
r.set_cookie(cookie);
try!(r.write(format!("<p>New Cookie – {}: {}</p>", key, val)));
},
Some(stored) => {
try!(r.write(format!("<p>Cookie – {}: {}</p>", key, stored)));
}
};
try!(r.write("<h3>Request Headers</h3>"));
let headers_in = get!(r.headers_in());
for (key, val) in headers_in.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Headers Out</h3>"));
let headers_out = get!(r.headers_out());
for (key, val) in headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Err Headers Out</h3>"));
let err_headers_out = get!(r.err_headers_out());
for (key, val) in err_headers_out.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Notes</h3>"));
let notes = get!(r.notes());
for (key, val) in notes.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Subprocess Environment</h3>"));
let subprocess_env = get!(r.subprocess_env());
for (key, val) in subprocess_env.iter() {
try!(r.write(format!("<p>{}: {}</p>", key, unwrap_str(val))));
}
try!(r.write("<h3>Request API check</h3>"));
let original = "Բարեւ, Héébee, გამარჯობა, Witôjze, Здраво, Ciao";
let encoded = try!(r.base64_encode(original));
let plain = try!(r.base64_decode(encoded));
try!(r.write(format!("<p>Original Text: {}</p>", original)));
try!(r.write(format!("<p>Base64 Encoded: {}</p>", encoded)));
try!(r.write(format!("<p>Base64 Decoded: {}</p>", plain)));
let original_url = "http://foo.bar/1 2 3 & 4 + 5";
let encoded_url = try!(r.escape_urlencoded(original_url));
let plain_url = try!(r.unescape_urlencoded(encoded_url));
try!(r.write(format!("<p>Original URL: {}</p>", original_url)));
try!(r.write(format!("<p>Encoded URL: {}</p>", encoded_url)));
try!(r.write(format!("<p>Decoded URL: {}</p>", plain_url)));
let date = try!(r.rfc822_date(0));
try!(r.write(format!("<p>RFC 822 Date: {}</p>", date)));
try!(r.write("</body></html>"));
Ok(Status::OK)
}
|
unwrap_str
|
identifier_name
|