file_name (large_string, lengths 4-69) | prefix (large_string, lengths 0-26.7k) | suffix (large_string, lengths 0-24.8k) | middle (large_string, lengths 0-2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---
vga_buffer.rs | use core::fmt;
use volatile::Volatile;
use spin::Mutex;
use lazy_static::lazy_static; // needed for the lazy_static! macro below (assuming the 2018-edition import style)
#[allow(dead_code)] // prevents compiler warnings that some enum variants are never used
#[derive(Debug, Clone, Copy, PartialEq, Eq)] // enables copy semantics and makes the type printable & comparable
#[repr(u8)] // makes each enum variant be stored as a u8
pub enum Color {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Magenta = 5,
Brown = 6,
LightGray = 7,
DarkGray = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
Pink = 13,
Yellow = 14,
White = 15,
}
// used to represent a full VGA color code (foreground & background)
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ColorCode(u8); // a newtype wrapping a single byte that holds a full color code
impl ColorCode {
// packs the foreground and background colors into a single byte (per the VGA specification)
fn new(foreground: Color, background: Color) -> ColorCode {
ColorCode((background as u8) << 4 | (foreground as u8))
}
}
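// For example (illustrative, not part of the original file): yellow text on a
// black background packs to 0x0e, since (Black as u8) << 4 is 0x00 and
// Yellow as u8 is 0x0e:
//     assert_eq!(ColorCode::new(Color::Yellow, Color::Black), ColorCode(0x0e));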
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
// ensures the struct's fields are laid out exactly like a C struct, since VGA depends on the order of the two bytes
#[repr(C)]
struct ScreenChar {
ascii_character: u8, // VGA byte representing ascii char
color_code: ColorCode, // VGA byte representing char's color
}
// VGA typical buffer sizes
const BUFFER_HEIGHT: usize = 25; // number of lines
const BUFFER_WIDTH: usize = 80; // number of chars in line
struct Buffer {
// The Volatile wrapper keeps the Rust compiler from optimizing away these writes:
// they are never read back and target VGA buffer memory (a side effect),
// not ordinary RAM
chars: [[Volatile<ScreenChar>; BUFFER_WIDTH]; BUFFER_HEIGHT],
}
// To actually write to the screen: always write to the last line and shift lines up when a line is full (or on \n)
pub struct Writer {
column_position: usize, // keeps track of current position in last row
color_code: ColorCode, // current fore & background colors
buffer: &'static mut Buffer, // reference to the VGA buffer; the 'static lifetime marks the reference as valid for the program's whole runtime
}
impl Writer {
// writes a single byte to the screen at current location
pub fn write_byte(&mut self, byte: u8) {
match byte {
b'\n' => self.new_line(),
byte => {
if self.column_position >= BUFFER_WIDTH {
self.new_line();
}
let row = BUFFER_HEIGHT - 1;
let col = self.column_position;
let color_code = self.color_code;
self.buffer.chars[row][col].write(ScreenChar {
ascii_character: byte,
color_code: color_code,
});
self.column_position += 1;
}
}
}
// accepts a string to be written, printing only valid ASCII chars
pub fn write_string(&mut self, s: &str) {
for byte in s.bytes() {
match byte {
// printable ASCII byte or newline
0x20..=0x7e | b'\n' => self.write_byte(byte),
// not part of printable ASCII range
_ => self.write_byte(0xfe),
}
}
}
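// Example (illustrative): write_string("héllo") prints 'h', then 0xfe (the ■
// placeholder) twice for the two UTF-8 bytes of 'é', then "llo".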
fn new_line(&mut self) {
// range notation is exclusive of the upper end;
// row 0 is the top line of the screen and gets shifted off screen
for row in 1..BUFFER_HEIGHT {
for col in 0..BUFFER_WIDTH {
let char = self.buffer.chars[row][col].read();
self.buffer.chars[row-1][col].write(char);
}
}
// clears the last line for new input; otherwise, if the string being written
// is shorter than the previous line, leftover characters would remain visible
self.clear_row(BUFFER_HEIGHT - 1);
self.column_position = 0;
}
// clears row by overwriting characters with spaces
fn clear_row(&mut self, row: usize) {
let blank = ScreenChar {
ascii_character: b' ',
color_code: self.color_code,
};
for col in 0..BUFFER_WIDTH {
self.buffer.chars[row][col].write(blank);
}
}
}
// Provides support for Rust's formatting macros, allowing easy printing
// of different types like integers or floats.
// Results in write! / writeln! macro support
impl fmt::Write for Writer {
// The only required method of the fmt::Write trait
fn write_str(&mut self, s: &str) -> fmt::Result |
}
// Provides a static Writer object even though its initialization uses non-const functions.
// Writing requires &mut self, so mutability is needed, but that mutability is never handed
// to users directly; it is interior mutability, and the Mutex makes the internal usage safe.
lazy_static! {
pub static ref WRITER: Mutex<Writer> = Mutex::new(Writer {
column_position: 0,
color_code: ColorCode::new(Color::Yellow, Color::Black),
// provides a direct mutable reference to the VGA memory-mapped I/O address,
// allowing reading and writing. This is sound because 0xb8000 always corresponds to
// the VGA text buffer, so the raw-pointer dereference may (and must) be wrapped in an unsafe block
buffer: unsafe { &mut *(0xb8000 as *mut Buffer) },
});
}
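// Example usage (a sketch): callers lock the Mutex to obtain mutable access,
// which is how the interior mutability described above is exposed safely:
//     WRITER.lock().write_string("hello from the VGA buffer");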
// Defines the print! macro
#[macro_export]
macro_rules! print {
($($arg:tt)*) => ($crate::vga_buffer::print(format_args!($($arg)*)));
}
// Defines the println! macro
#[macro_export]
macro_rules! println {
() => (print!("\n"));
($fmt:expr) => (print!(concat!($fmt, "\n")));
($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}
pub fn print(args: fmt::Arguments) {
use core::fmt::Write; // imports write_fmt method from the Write trait
WRITER.lock().write_fmt(args).unwrap();
}
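// Example (illustrative): println! appends "\n" to the format string and
// forwards to print!, which calls this function via format_args!:
//     println!("{} + {} = {}", 1, 2, 1 + 2); // writes "1 + 2 = 3\n" to the screen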
#[cfg(test)]
mod test {
use super::*; // import all items of parent module: vga_buffer
// Specifies which character represents an empty cell in the VGA buffer during testing
fn empty_char() -> ScreenChar {
ScreenChar {
ascii_character: b' ',
color_code: ColorCode::new(Color::Green, Color::Brown),
}
}
fn construct_buffer() -> Buffer {
// bypasses the array-construction requirement that the contained type be Copy:
// ScreenChar satisfies it, but the Volatile wrapper does not
use array_init::array_init;
Buffer {
// Provides array initialization for non-Copy types.
// The parameter of array_init is a closure; the closure's single parameter (the index) is unused here,
// though it could otherwise be used to compute each element's value.
// array_init uses the array type's length to create the required number of elements. In this case
// the number of columns and rows is defined by the Buffer struct:
// "The width & height are deduced by type inference"
chars: array_init(|_| array_init(|_| Volatile::new(empty_char()))),
}
}
fn construct_writer() -> Writer {
use std::boxed::Box;
let buffer = construct_buffer();
Writer {
column_position: 0,
color_code: ColorCode::new(Color::Blue, Color::Magenta),
// leaks the boxed buffer to obtain a &'static mut, satisfying the buffer field's type
buffer: Box::leak(Box::new(buffer)),
}
}
#[test] // tells test framework this is a test function
fn write_byte() {
let mut writer = construct_writer();
writer.write_byte(b'X');
writer.write_byte(b'Y');
for (i, row) in writer.buffer.chars.iter().enumerate() {
for (j, screen_char) in row.iter().enumerate() {
let screen_char = screen_char.read();
if i == BUFFER_HEIGHT - 1 && j == 0 {
assert_eq!(screen_char.ascii_character, b'X');
assert_eq!(screen_char.color_code, writer.color_code);
} else if i == BUFFER_HEIGHT - 1 && j == 1 {
assert_eq!(screen_char.ascii_character, b'Y');
assert_eq!(screen_char.color_code, writer.color_code);
} else {
assert_eq!(screen_char, empty_char());
}
}
}
}
#[test]
fn write_formatted() {
use core::fmt::Write;
let mut writer = construct_writer();
writeln!(&mut writer, "a").unwrap();
writeln!(&mut writer, "b{}", "c").unwrap();
for (i, row) in writer.buffer.chars.iter().enumerate() {
for (j, screen_char) in row.iter().enumerate() {
let screen_char = screen_char.read();
if i == BUFFER_HEIGHT - 3 && j == 0 {
assert_eq!(screen_char.ascii_character, b'a');
assert_eq!(screen_char.color_code, writer.color_code);
} else if i == BUFFER_HEIGHT - 2 && j == 0 {
assert_eq!(screen_char.ascii_character, b'b');
assert_eq!(screen_char.color_code, writer.color_code);
} else if i == BUFFER_HEIGHT - 2 && j == 1 {
assert_eq!(screen_char.ascii_character, b'c');
assert_eq!(screen_char.color_code, writer.color_code);
} else if i >= BUFFER_HEIGHT - 2 { // ensures empty lines are shifted in on a new line and have correct color code
assert_eq!(screen_char.ascii_character, b' ');
assert_eq!(screen_char.color_code, writer.color_code);
} else {
assert_eq!(screen_char, empty_char());
}
}
}
}
}
| {
self.write_string(s);
Ok(())
} | identifier_body |
mod.rs | query(&self, spec: &str) -> CargoResult<&PackageId> {
let spec = try!(PackageIdSpec::parse(spec).chain_error(|| {
human(format!("invalid package id specification: `{}`", spec))
}));
let mut ids = self.iter().filter(|p| spec.matches(*p));
let ret = match ids.next() {
Some(id) => id,
None => return Err(human(format!("package id specification `{}` \
matched no packages", spec))),
};
return match ids.next() {
Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
spec.name(), spec);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, &spec);
Err(human(msg))
}
None => Ok(ret)
};
fn minimize(msg: &mut String,
ids: Vec<&PackageId>,
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids.iter() {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids.iter() {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(),
id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
}
}
}
}
pub fn features(&self, pkg: &PackageId) -> Option<&HashSet<String>> |
}
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "graph: {:?}\n", self.graph));
try!(write!(fmt, "\nfeatures: {{\n"));
for (pkg, features) in &self.features {
try!(write!(fmt, " {}: {:?}\n", pkg, features));
}
write!(fmt, "}}")
}
}
#[derive(Clone)]
struct Context {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
resolve: Resolve,
visited: Rc<RefCell<HashSet<PackageId>>>,
}
/// Builds the list of all packages required to build the first argument.
pub fn resolve(summary: &Summary, method: Method,
registry: &mut Registry) -> CargoResult<Resolve> {
trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Box::new(Context {
resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
visited: Rc::new(RefCell::new(HashSet::new())),
});
let _p = profile::start(format!("resolving: {}", summary.package_id()));
match try!(activate(cx, registry, &summary, method)) {
Ok(cx) => {
debug!("resolved: {:?}", cx.resolve);
Ok(cx.resolve)
}
Err(e) => Err(e),
}
}
fn activate(mut cx: Box<Context>,
registry: &mut Registry,
parent: &Rc<Summary>,
method: Method)
-> CargoResult<CargoResult<Box<Context>>> {
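// Note (editor's illustrative comment): the nested CargoResult distinguishes
// fatal errors (outer Err, which abort resolution) from recoverable failures
// (inner Err, which let activate_deps backtrack and try another candidate).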
// Dependency graphs are required to be a DAG, so we keep a set of
// packages we're visiting and bail if we hit a dupe.
let id = parent.package_id();
if !cx.visited.borrow_mut().insert(id.clone()) {
return Err(human(format!("cyclic package dependency: package `{}` \
depends on itself", id)))
}
// If we're already activated, then that was easy!
if flag_activated(&mut *cx, parent, &method) {
cx.visited.borrow_mut().remove(id);
return Ok(Ok(cx))
}
debug!("activating {}", parent.package_id());
// Extracting the platform request.
let platform = match method {
Method::Required { target_platform: platform, .. } => platform,
Method::Everything => None,
};
// First, figure out our set of dependencies based on the requested set of
// features. This also calculates what features we're going to enable for
// our own dependencies.
let deps = try!(resolve_features(&mut cx, parent, method));
// Next, transform all dependencies into a list of possible candidates which
// can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| {
let mut candidates = try!(registry.query(dep));
// When we attempt versions for a package, we'll want to start at the
// maximum version and work our way down.
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
let candidates = candidates.into_iter().map(Rc::new).collect::<Vec<_>>();
Ok((dep, candidates, features))
}).collect::<CargoResult<Vec<_>>>());
// When we recurse, attempt to resolve dependencies with fewer candidates
// before recursing on dependencies with more candidates. This way if the
// dependency with only one candidate can't be resolved we don't have to do
// a bunch of work before we figure that out.
deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
a.len().cmp(&b.len())
});
// Workaround compilation error: `deps` does not live long enough
let platform = platform.map(|s| &*s);
Ok(match try!(activate_deps(cx, registry, parent, platform, &deps, 0)) {
Ok(cx) => {
cx.visited.borrow_mut().remove(parent.package_id());
Ok(cx)
}
Err(e) => Err(e),
})
}
// Activate this summary by inserting it into our list of known activations.
//
// Returns whether this summary with the given method is already activated.
fn flag_activated(cx: &mut Context,
summary: &Rc<Summary>,
method: &Method) -> bool {
let id = summary.package_id();
let key = (id.name().to_string(), id.source_id().clone());
let prev = cx.activations.entry(key).or_insert(Vec::new());
if !prev.iter().any(|c| c == summary) {
cx.resolve.graph.add(id.clone(), &[]);
prev.push(summary.clone());
return false
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match *method {
Method::Required { features, uses_default_features, .. } => {
(features, uses_default_features)
}
Method::Everything => return false,
};
let has_default_feature = summary.features().contains_key("default");
match cx.resolve.features(id) {
Some(prev) => {
features.iter().all(|f| prev.contains(f)) &&
(!use_default || prev.contains("default") || !has_default_feature)
}
None => features.len() == 0 && (!use_default || !has_default_feature)
}
}
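// Illustrative note (not part of the original file): re-activating a package is
// treated as a no-op only when every requested feature (and the default feature,
// when requested and defined) was already enabled by a previous activation.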
fn activate_deps<'a>(cx: Box<Context>,
registry: &mut Registry,
parent: &Summary,
platform: Option<&'a str>,
deps: &'a [(&Dependency, Vec<Rc<Summary>>, Vec<String>)],
cur: usize) -> CargoResult<CargoResult<Box<Context>>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
let method = Method::Required{
dev_deps: false,
features: &features,
uses_default_features: dep.uses_default_features(),
target_platform: platform};
let key = (dep.name().to_string(), dep.source_id().clone());
let prev_active = cx.activations.get(&key)
.map(|v| &v[..]).unwrap_or(&[]);
trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
candidates.len());
trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
dep.name(), prev_active.len());
// Filter the set of candidates based on the previously activated
// versions for this dependency. We can actually use a version if it
// precisely matches an activated version or if it is otherwise
// incompatible with all other activated versions. Note that we define
// "compatible" here in terms of the semver sense where if the left-most
// nonzero digit is the same they're considered compatible.
let my_candidates = candidates.iter().filter(|&b| {
prev_active.iter().any(|a| a == b) ||
prev_active.iter().all(|a| {
!compatible(a.version(), b.version())
})
});
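// For example (illustrative): if 1.2.0 is already active for this dependency,
// a candidate 1.3.0 is filtered out (semver-compatible with 1.2.0 but not equal),
// while an exact 1.2.0 or an incompatible 2.0.0 would be kept.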
// Alright, for each candidate that's gotten this far, it meets the
// following requirements:
//
// 1. The version matches the dependency requirement listed for this
// package
// 2. There are no activated versions for this package which are
// semver-compatible, or there's an activated version which is
// precisely equal to `candidate`.
//
// This means that we're going to attempt to activate each candidate in
// turn. We could possibly fail to activate each candidate, so we try
// each one in turn.
let mut last_err = None;
for candidate in my_candidates {
trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
candidate.version());
let mut my_cx = cx.clone();
my_cx.resolve.graph.link(parent.package_id().clone(),
candidate.package_id().clone());
// If we hit an intransitive dependency then clear out the visitation
// list as we can't induce a cycle through transitive dependencies.
if !dep.is_transitive() {
my_cx.visited.borrow_mut().clear();
}
let my_cx = match try!(activate(my_cx, registry, candidate, method)) {
Ok(cx) => cx,
Err(e) => { last_err = Some(e); continue }
};
match try!(activate_deps(my_cx, registry, parent, platform, deps,
cur + 1)) {
Ok(cx) => return Ok(Ok(cx)),
Err(e) => { last_err = Some(e); }
}
}
trace!("{}[{}]>{} -- {:?}", parent.name(), cur, dep.name(),
last_err);
// Oh well, we couldn't activate any of the candidates, so we just can't
// activate this dependency at all
Ok(activation_error(&cx, registry, last_err, parent, dep, prev_active,
&candidates))
}
fn activation_error(cx: &Context,
registry: &mut Registry,
err: Option<Box<CargoError>>,
parent: &Summary,
dep: &Dependency,
prev_active: &[Rc<Summary>],
candidates: &[Rc<Summary>]) -> CargoResult<Box<Context>> {
match err {
Some(e) => return Err(e),
None => {}
}
if candidates.len() > 0 {
let mut msg = format!("failed to select a version for `{}` \
(required by `{}`):\n\
all possible versions conflict with \
previously selected versions of `{}`",
dep.name(), parent.name(),
dep.name());
'outer: for v in prev_active.iter() {
for node in cx.resolve.graph.iter() {
let edges = match cx.resolve.graph.edges(node) {
Some(edges) => edges,
None => continue,
};
for edge in edges {
if edge != v.package_id() { continue }
msg.push_str(&format!("\n version {} in use by {}",
v.version(), edge));
continue 'outer;
}
}
msg.push_str(&format!("\n version {} in use by??",
v.version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
.map(|v| v.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.connect(", ")));
return Err(human(msg))
}
// Once we're all the way down here, we're definitely lost in the
// weeds! We didn't actually use any candidates above, so we need to
// give an error message that nothing was found.
//
// Note that we re-query the registry with a new dependency that
// allows any version so we can give some nicer error reporting
// which indicates a few versions that were actually found.
let msg = format!("no matching package named `{}` found \
(required by `{}`)\n\
location searched: {}\n\
version required: {}",
dep.name(), parent.name(),
dep.source_id(),
dep.version_req());
let mut msg = msg;
let all_req = semver::VersionReq::parse("*").unwrap();
let new_dep = dep.clone().set_version_req(all_req);
let mut candidates = try!(registry.query(&new_dep));
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
if candidates.len() > 0 {
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i != 0 { msg.push_str(", "); }
msg.push_str(&c.version().to_string());
}
if candidates.len() > 3 {
msg.push_str(",...");
}
}
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() &&
dep.version_req().to_string().starts_with("=") &&
candidates.len() > 0 {
msg.push_str("\nconsider running `cargo update` to update \
a path dependency's locked version");
}
Err(human(msg))
}
// Returns whether `a` and `b` are compatible in the semver sense. This is a
// commutative operation.
//
// Versions `a` and `b` are compatible if their left-most nonzero digit is the
// same.
fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
if a.major != b.major { return false }
if a.major != 0 { return true }
if a.minor != b.minor { return false }
if a.minor != 0 { return true }
a.patch == b.patch
}
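// Examples (illustrative) of the rule above:
//   compatible(1.2.3, 1.9.0) == true    // same nonzero major
//   compatible(0.2.3, 0.3.1) == false   // major is 0 and minors differ
//   compatible(0.0.3, 0.0.3) == true    // major and minor are 0: patch must match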
fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary,
method: Method)
-> CargoResult<HashMap<&'a str,
(&'a Dependency, Vec<String>)>> {
let dev_deps = match method {
Method::Everything => true,
Method::Required { dev_deps, .. } => dev_deps,
};
// First, filter by dev-dependencies
let deps = parent.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
// Second, ignore dependencies that should not be compiled for this platform
let deps = deps.filter(|d| {
match method {
Method::Required { target_platform: Some(ref platform), .. } => {
d.is_active_for_platform(platform)
},
_ => true
}
});
let (mut feature_deps, used_features) = try!(build_features(parent, method));
let mut ret = HashMap::new();
// Next, sanitize all requested features by whitelisting all the requested
// features that correspond to optional dependencies
for dep in deps {
// weed out optional dependencies, but not those required
if dep.is_optional() && !feature_deps.contains_key(dep.name()) {
continue
}
let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
for feature in dep.features().iter() {
base.push(feature.clone());
if feature.contains("/") {
return Err(human(format!("features in dependencies \
cannot enable features in \
other dependencies: `{}`",
feature)));
}
}
ret.insert(dep.name(), (dep, base));
}
// All features can only point to optional dependencies, in which case they
// should have all been weeded out by the above iteration. Any remaining
// features are bugs in that the package does not actually have those
// features.
if feature_deps.len() > 0 {
let unknown = feature_deps.keys().map(|s| &s[..])
.collect::<Vec<&str>>();
if unknown.len() > 0 {
let features = unknown.connect(", ");
return Err(human(format!("Package `{}` does not have these features: \
`{}`", parent.package_id(), features)))
}
}
// Record what list of features is active for this package.
if used_features.len() > 0 {
let pkgid = parent.package_id();
cx.resolve.features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
}
Ok(ret)
}
// Returns a pair of (feature dependencies, all used features)
//
// The feature dependencies map is a mapping of package name to list of features
// enabled. Each package should be enabled, and each package should have the
// specified set of features enabled.
//
// The all used features set is the set of features which this local package had
// enabled, which is later used when compiling to instruct the code what
// features were enabled.
fn build_features(s: &Summary, method: Method)
-> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
let mut deps = HashMap::new();
let mut used = HashSet::new();
let mut visited = HashSet::new();
match method {
Method::Everything => {
for key in s.features().keys() {
try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
try!(add_feature(s, dep.name(), &mut deps, | {
self.features.get(pkg)
} | identifier_body |
mod.rs | `", spec))
}));
let mut ids = self.iter().filter(|p| spec.matches(*p));
let ret = match ids.next() {
Some(id) => id,
None => return Err(human(format!("package id specification `{}` \
matched no packages", spec))),
};
return match ids.next() {
Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
spec.name(), spec);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, &spec);
Err(human(msg))
}
None => Ok(ret)
};
fn minimize(msg: &mut String,
ids: Vec<&PackageId>,
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids.iter() {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids.iter() {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(),
id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
}
}
}
}
pub fn features(&self, pkg: &PackageId) -> Option<&HashSet<String>> {
self.features.get(pkg)
}
}
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "graph: {:?}\n", self.graph));
try!(write!(fmt, "\nfeatures: {{\n"));
for (pkg, features) in &self.features {
try!(write!(fmt, " {}: {:?}\n", pkg, features));
}
write!(fmt, "}}")
}
}
#[derive(Clone)]
struct Context {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
resolve: Resolve,
visited: Rc<RefCell<HashSet<PackageId>>>,
}
/// Builds the list of all packages required to build the first argument.
pub fn resolve(summary: &Summary, method: Method,
registry: &mut Registry) -> CargoResult<Resolve> {
trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Box::new(Context {
resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
visited: Rc::new(RefCell::new(HashSet::new())),
});
let _p = profile::start(format!("resolving: {}", summary.package_id()));
match try!(activate(cx, registry, &summary, method)) {
Ok(cx) => {
debug!("resolved: {:?}", cx.resolve);
Ok(cx.resolve)
}
Err(e) => Err(e),
}
}
fn activate(mut cx: Box<Context>,
registry: &mut Registry,
parent: &Rc<Summary>,
method: Method)
-> CargoResult<CargoResult<Box<Context>>> {
// Dependency graphs are required to be a DAG, so we keep a set of
// packages we're visiting and bail if we hit a dupe.
let id = parent.package_id();
if!cx.visited.borrow_mut().insert(id.clone()) {
return Err(human(format!("cyclic package dependency: package `{}` \
depends on itself", id)))
}
// If we're already activated, then that was easy!
if flag_activated(&mut *cx, parent, &method) {
cx.visited.borrow_mut().remove(id);
return Ok(Ok(cx))
}
debug!("activating {}", parent.package_id());
// Extracting the platform request.
let platform = match method {
Method::Required{target_platform: platform,..} => platform,
Method::Everything => None,
};
// First, figure out our set of dependencies based on the requsted set of
// features. This also calculates what features we're going to enable for
// our own dependencies.
let deps = try!(resolve_features(&mut cx, parent, method));
// Next, transform all dependencies into a list of possible candidates which
// can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| {
let mut candidates = try!(registry.query(dep));
// When we attempt versions for a package, we'll want to start at the
// maximum version and work our way down.
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
let candidates = candidates.into_iter().map(Rc::new).collect::<Vec<_>>();
Ok((dep, candidates, features))
}).collect::<CargoResult<Vec<_>>>());
// When we recurse, attempt to resolve dependencies with fewer candidates
// before recursing on dependencies with more candidates. This way if the
// dependency with only one candidate can't be resolved we don't have to do
// a bunch of work before we figure that out.
deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
a.len().cmp(&b.len())
});
// Workaround compilation error: `deps` does not live long enough
let platform = platform.map(|s| &*s);
Ok(match try!(activate_deps(cx, registry, parent, platform, &deps, 0)) {
Ok(cx) => {
cx.visited.borrow_mut().remove(parent.package_id());
Ok(cx)
}
Err(e) => Err(e),
})
}
// Activate this summary by inserting it into our list of known activations.
//
// Returns if this summary with the given method is already activated.
fn flag_activated(cx: &mut Context,
summary: &Rc<Summary>,
method: &Method) -> bool {
let id = summary.package_id();
let key = (id.name().to_string(), id.source_id().clone());
let prev = cx.activations.entry(key).or_insert(Vec::new());
if!prev.iter().any(|c| c == summary) {
cx.resolve.graph.add(id.clone(), &[]);
prev.push(summary.clone());
return false
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match *method {
Method::Required { features, uses_default_features,.. } => {
(features, uses_default_features)
}
Method::Everything => return false,
};
let has_default_feature = summary.features().contains_key("default");
match cx.resolve.features(id) {
Some(prev) => {
features.iter().all(|f| prev.contains(f)) &&
(!use_default || prev.contains("default") ||!has_default_feature)
}
None => features.len() == 0 && (!use_default ||!has_default_feature)
}
}
fn activate_deps<'a>(cx: Box<Context>,
registry: &mut Registry,
parent: &Summary,
platform: Option<&'a str>,
deps: &'a [(&Dependency, Vec<Rc<Summary>>, Vec<String>)],
cur: usize) -> CargoResult<CargoResult<Box<Context>>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
let method = Method::Required{
dev_deps: false,
features: &features,
uses_default_features: dep.uses_default_features(),
target_platform: platform};
let key = (dep.name().to_string(), dep.source_id().clone());
let prev_active = cx.activations.get(&key)
.map(|v| &v[..]).unwrap_or(&[]);
trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
candidates.len());
trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
dep.name(), prev_active.len());
// Filter the set of candidates based on the previously activated
// versions for this dependency. We can actually use a version if it
// precisely matches an activated version or if it is otherwise
// incompatible with all other activated versions. Note that we define
// "compatible" here in terms of the semver sense where if the left-most
// nonzero digit is the same they're considered compatible.
let my_candidates = candidates.iter().filter(|&b| {
prev_active.iter().any(|a| a == b) ||
prev_active.iter().all(|a| {
!compatible(a.version(), b.version())
})
});
// Alright, for each candidate that's gotten this far, it meets the
// following requirements:
//
// 1. The version matches the dependency requirement listed for this
// package
// 2. There are no activated versions for this package which are
// semver-compatible, or there's an activated version which is
// precisely equal to `candidate`.
//
// This means that we're going to attempt to activate each candidate in
// turn. We could possibly fail to activate each candidate, so we try
// each one in turn.
let mut last_err = None;
for candidate in my_candidates {
trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
candidate.version());
let mut my_cx = cx.clone();
my_cx.resolve.graph.link(parent.package_id().clone(),
candidate.package_id().clone());
// If we hit an intransitive dependency then clear out the visitation
// list as we can't induce a cycle through transitive dependencies.
if!dep.is_transitive() {
my_cx.visited.borrow_mut().clear();
}
let my_cx = match try!(activate(my_cx, registry, candidate, method)) {
Ok(cx) => cx,
Err(e) => { last_err = Some(e); continue }
};
match try!(activate_deps(my_cx, registry, parent, platform, deps,
cur + 1)) {
Ok(cx) => return Ok(Ok(cx)),
Err(e) => { last_err = Some(e); }
}
}
trace!("{}[{}]>{} -- {:?}", parent.name(), cur, dep.name(),
last_err);
// Oh well, we couldn't activate any of the candidates, so we just can't
// activate this dependency at all
Ok(activation_error(&cx, registry, last_err, parent, dep, prev_active,
&candidates))
}
fn activation_error(cx: &Context,
registry: &mut Registry,
err: Option<Box<CargoError>>,
parent: &Summary,
dep: &Dependency,
prev_active: &[Rc<Summary>],
candidates: &[Rc<Summary>]) -> CargoResult<Box<Context>> {
match err {
Some(e) => return Err(e),
None => {}
}
if candidates.len() > 0 {
let mut msg = format!("failed to select a version for `{}` \
(required by `{}`):\n\
all possible versions conflict with \
previously selected versions of `{}`",
dep.name(), parent.name(),
dep.name());
'outer: for v in prev_active.iter() {
for node in cx.resolve.graph.iter() {
let edges = match cx.resolve.graph.edges(node) {
Some(edges) => edges,
None => continue,
};
for edge in edges {
if edge!= v.package_id() { continue }
msg.push_str(&format!("\n version {} in use by {}",
v.version(), edge));
continue 'outer;
}
}
msg.push_str(&format!("\n version {} in use by??",
v.version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
.map(|v| v.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.connect(", ")));
return Err(human(msg))
}
// Once we're all the way down here, we're definitely lost in the
// weeds! We didn't actually use any candidates above, so we need to
// give an error message that nothing was found.
//
// Note that we re-query the registry with a new dependency that
// allows any version so we can give some nicer error reporting
// which indicates a few versions that were actually found.
let msg = format!("no matching package named `{}` found \
(required by `{}`)\n\
location searched: {}\n\
version required: {}",
dep.name(), parent.name(),
dep.source_id(),
dep.version_req());
let mut msg = msg;
let all_req = semver::VersionReq::parse("*").unwrap();
let new_dep = dep.clone().set_version_req(all_req);
let mut candidates = try!(registry.query(&new_dep));
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
if candidates.len() > 0 {
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i!= 0 { msg.push_str(", "); }
msg.push_str(&c.version().to_string());
}
if candidates.len() > 3 {
msg.push_str(",...");
}
}
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() &&
dep.version_req().to_string().starts_with("=") &&
candidates.len() > 0 {
msg.push_str("\nconsider running `cargo update` to update \
a path dependency's locked version");
}
Err(human(msg))
}
// Returns if `a` and `b` are compatible in the semver sense. This is a
// commutative operation.
//
// Versions `a` and `b` are compatible if their left-most nonzero digit is the
// same.
fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
if a.major!= b.major { return false }
if a.major!= 0 { return true }
if a.minor!= b.minor { return false }
if a.minor!= 0 { return true }
a.patch == b.patch
}
fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary,
method: Method)
-> CargoResult<HashMap<&'a str,
(&'a Dependency, Vec<String>)>> {
let dev_deps = match method {
Method::Everything => true,
Method::Required { dev_deps,.. } => dev_deps,
};
// First, filter by dev-dependencies
let deps = parent.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
// Second, ignoring dependencies that should not be compiled for this platform
let deps = deps.filter(|d| {
match method {
Method::Required{target_platform: Some(ref platform),..} => {
d.is_active_for_platform(platform)
},
_ => true
}
});
let (mut feature_deps, used_features) = try!(build_features(parent, method));
let mut ret = HashMap::new();
// Next, sanitize all requested features by whitelisting all the requested
// features that correspond to optional dependencies
for dep in deps {
// weed out optional dependencies, but not those required
if dep.is_optional() &&!feature_deps.contains_key(dep.name()) {
continue
}
let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
for feature in dep.features().iter() {
base.push(feature.clone());
if feature.contains("/") {
return Err(human(format!("features in dependencies \
cannot enable features in \
other dependencies: `{}`",
feature)));
}
}
ret.insert(dep.name(), (dep, base));
}
// All features can only point to optional dependencies, in which case they
// should have all been weeded out by the above iteration. Any remaining
// features are bugs in that the package does not actually have those
// features.
if feature_deps.len() > 0 {
let unknown = feature_deps.keys().map(|s| &s[..])
.collect::<Vec<&str>>();
if unknown.len() > 0 {
let features = unknown.connect(", ");
return Err(human(format!("Package `{}` does not have these features: \
`{}`", parent.package_id(), features)))
}
}
// Record what list of features is active for this package.
if used_features.len() > 0 {
let pkgid = parent.package_id();
cx.resolve.features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
}
Ok(ret)
}
// Returns a pair of (feature dependencies, all used features)
//
// The feature dependencies map is a mapping of package name to list of features
// enabled. Each package should be enabled, and each package should have the
// specified set of features enabled.
//
// The all used features set is the set of features which this local package had
// enabled, which is later used when compiling to instruct the code what
// features were enabled.
fn build_features(s: &Summary, method: Method)
-> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
let mut deps = HashMap::new();
let mut used = HashSet::new();
let mut visited = HashSet::new();
match method {
Method::Everything => | {
for key in s.features().keys() {
try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
try!(add_feature(s, dep.name(), &mut deps, &mut used,
&mut visited));
}
} | conditional_block |
|
mod.rs | (&self) -> Nodes<PackageId> {
self.graph.iter()
}
pub fn root(&self) -> &PackageId { &self.root }
pub fn deps(&self, pkg: &PackageId) -> Option<Edges<PackageId>> {
self.graph.edges(pkg)
}
pub fn query(&self, spec: &str) -> CargoResult<&PackageId> {
let spec = try!(PackageIdSpec::parse(spec).chain_error(|| {
human(format!("invalid package id specification: `{}`", spec))
}));
let mut ids = self.iter().filter(|p| spec.matches(*p));
let ret = match ids.next() {
Some(id) => id,
None => return Err(human(format!("package id specification `{}` \
matched no packages", spec))),
};
return match ids.next() {
Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
spec.name(), spec);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, &spec);
Err(human(msg))
}
None => Ok(ret)
};
fn minimize(msg: &mut String,
ids: Vec<&PackageId>,
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids.iter() {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids.iter() {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(),
id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
}
}
}
}
pub fn features(&self, pkg: &PackageId) -> Option<&HashSet<String>> {
self.features.get(pkg)
}
}
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "graph: {:?}\n", self.graph));
try!(write!(fmt, "\nfeatures: {{\n"));
for (pkg, features) in &self.features {
try!(write!(fmt, " {}: {:?}\n", pkg, features));
}
write!(fmt, "}}")
}
}
#[derive(Clone)]
struct Context {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
resolve: Resolve,
visited: Rc<RefCell<HashSet<PackageId>>>,
}
/// Builds the list of all packages required to build the first argument.
pub fn resolve(summary: &Summary, method: Method,
registry: &mut Registry) -> CargoResult<Resolve> {
trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Box::new(Context {
resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
visited: Rc::new(RefCell::new(HashSet::new())),
});
let _p = profile::start(format!("resolving: {}", summary.package_id()));
match try!(activate(cx, registry, &summary, method)) {
Ok(cx) => {
debug!("resolved: {:?}", cx.resolve);
Ok(cx.resolve)
}
Err(e) => Err(e),
}
}
fn activate(mut cx: Box<Context>,
registry: &mut Registry,
parent: &Rc<Summary>,
method: Method)
-> CargoResult<CargoResult<Box<Context>>> {
// Dependency graphs are required to be a DAG, so we keep a set of
// packages we're visiting and bail if we hit a dupe.
let id = parent.package_id();
if!cx.visited.borrow_mut().insert(id.clone()) {
return Err(human(format!("cyclic package dependency: package `{}` \
depends on itself", id)))
}
// If we're already activated, then that was easy!
if flag_activated(&mut *cx, parent, &method) {
cx.visited.borrow_mut().remove(id);
return Ok(Ok(cx))
}
debug!("activating {}", parent.package_id());
// Extracting the platform request.
let platform = match method {
Method::Required{target_platform: platform,..} => platform,
Method::Everything => None,
};
// First, figure out our set of dependencies based on the requsted set of
// features. This also calculates what features we're going to enable for
// our own dependencies.
let deps = try!(resolve_features(&mut cx, parent, method));
// Next, transform all dependencies into a list of possible candidates which
// can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| {
let mut candidates = try!(registry.query(dep));
// When we attempt versions for a package, we'll want to start at the
// maximum version and work our way down.
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
let candidates = candidates.into_iter().map(Rc::new).collect::<Vec<_>>();
Ok((dep, candidates, features))
}).collect::<CargoResult<Vec<_>>>());
// When we recurse, attempt to resolve dependencies with fewer candidates
// before recursing on dependencies with more candidates. This way if the
// dependency with only one candidate can't be resolved we don't have to do
// a bunch of work before we figure that out.
deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
a.len().cmp(&b.len())
});
// Workaround compilation error: `deps` does not live long enough
let platform = platform.map(|s| &*s);
Ok(match try!(activate_deps(cx, registry, parent, platform, &deps, 0)) {
Ok(cx) => {
cx.visited.borrow_mut().remove(parent.package_id());
Ok(cx)
}
Err(e) => Err(e),
})
}
// Activate this summary by inserting it into our list of known activations.
//
// Returns if this summary with the given method is already activated.
fn flag_activated(cx: &mut Context,
summary: &Rc<Summary>,
method: &Method) -> bool {
let id = summary.package_id();
let key = (id.name().to_string(), id.source_id().clone());
let prev = cx.activations.entry(key).or_insert(Vec::new());
if!prev.iter().any(|c| c == summary) {
cx.resolve.graph.add(id.clone(), &[]);
prev.push(summary.clone());
return false
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match *method {
Method::Required { features, uses_default_features,.. } => {
(features, uses_default_features)
}
Method::Everything => return false,
};
let has_default_feature = summary.features().contains_key("default");
match cx.resolve.features(id) {
Some(prev) => {
features.iter().all(|f| prev.contains(f)) &&
(!use_default || prev.contains("default") ||!has_default_feature)
}
None => features.len() == 0 && (!use_default ||!has_default_feature)
}
}
fn activate_deps<'a>(cx: Box<Context>,
registry: &mut Registry,
parent: &Summary,
platform: Option<&'a str>,
deps: &'a [(&Dependency, Vec<Rc<Summary>>, Vec<String>)],
cur: usize) -> CargoResult<CargoResult<Box<Context>>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
let method = Method::Required{
dev_deps: false,
features: &features,
uses_default_features: dep.uses_default_features(),
target_platform: platform};
let key = (dep.name().to_string(), dep.source_id().clone());
let prev_active = cx.activations.get(&key)
.map(|v| &v[..]).unwrap_or(&[]);
trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
candidates.len());
trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
dep.name(), prev_active.len());
// Filter the set of candidates based on the previously activated
// versions for this dependency. We can actually use a version if it
// precisely matches an activated version or if it is otherwise
// incompatible with all other activated versions. Note that we define
// "compatible" here in terms of the semver sense where if the left-most
// nonzero digit is the same they're considered compatible.
let my_candidates = candidates.iter().filter(|&b| {
prev_active.iter().any(|a| a == b) ||
prev_active.iter().all(|a| {
!compatible(a.version(), b.version())
})
});
// Alright, for each candidate that's gotten this far, it meets the
// following requirements:
//
// 1. The version matches the dependency requirement listed for this
// package
// 2. There are no activated versions for this package which are
// semver-compatible, or there's an activated version which is
// precisely equal to `candidate`.
//
// This means that we're going to attempt to activate each candidate in
// turn. We could possibly fail to activate each candidate, so we try
// each one in turn.
let mut last_err = None;
for candidate in my_candidates {
trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
candidate.version());
let mut my_cx = cx.clone();
my_cx.resolve.graph.link(parent.package_id().clone(),
candidate.package_id().clone());
// If we hit an intransitive dependency then clear out the visitation
// list as we can't induce a cycle through transitive dependencies.
if!dep.is_transitive() {
my_cx.visited.borrow_mut().clear();
}
let my_cx = match try!(activate(my_cx, registry, candidate, method)) {
Ok(cx) => cx,
Err(e) => { last_err = Some(e); continue }
};
match try!(activate_deps(my_cx, registry, parent, platform, deps,
cur + 1)) {
Ok(cx) => return Ok(Ok(cx)),
Err(e) => { last_err = Some(e); }
}
}
trace!("{}[{}]>{} -- {:?}", parent.name(), cur, dep.name(),
last_err);
// Oh well, we couldn't activate any of the candidates, so we just can't
// activate this dependency at all
Ok(activation_error(&cx, registry, last_err, parent, dep, prev_active,
&candidates))
}
fn activation_error(cx: &Context,
registry: &mut Registry,
err: Option<Box<CargoError>>,
parent: &Summary,
dep: &Dependency,
prev_active: &[Rc<Summary>],
candidates: &[Rc<Summary>]) -> CargoResult<Box<Context>> {
match err {
Some(e) => return Err(e),
None => {}
}
if candidates.len() > 0 {
let mut msg = format!("failed to select a version for `{}` \
(required by `{}`):\n\
all possible versions conflict with \
previously selected versions of `{}`",
dep.name(), parent.name(),
dep.name());
'outer: for v in prev_active.iter() {
for node in cx.resolve.graph.iter() {
let edges = match cx.resolve.graph.edges(node) {
Some(edges) => edges,
None => continue,
};
for edge in edges {
if edge!= v.package_id() { continue }
msg.push_str(&format!("\n version {} in use by {}",
v.version(), edge));
continue 'outer;
}
}
msg.push_str(&format!("\n version {} in use by??",
v.version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
.map(|v| v.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.connect(", ")));
return Err(human(msg))
}
// Once we're all the way down here, we're definitely lost in the
// weeds! We didn't actually use any candidates above, so we need to
// give an error message that nothing was found.
//
// Note that we re-query the registry with a new dependency that
// allows any version so we can give some nicer error reporting
// which indicates a few versions that were actually found.
let msg = format!("no matching package named `{}` found \
(required by `{}`)\n\
location searched: {}\n\
version required: {}",
dep.name(), parent.name(),
dep.source_id(),
dep.version_req());
let mut msg = msg;
let all_req = semver::VersionReq::parse("*").unwrap();
let new_dep = dep.clone().set_version_req(all_req);
let mut candidates = try!(registry.query(&new_dep));
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
if candidates.len() > 0 {
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i!= 0 { msg.push_str(", "); }
msg.push_str(&c.version().to_string());
}
if candidates.len() > 3 {
msg.push_str(",...");
}
}
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() &&
dep.version_req().to_string().starts_with("=") &&
candidates.len() > 0 {
msg.push_str("\nconsider running `cargo update` to update \
a path dependency's locked version");
}
Err(human(msg))
}
// Returns if `a` and `b` are compatible in the semver sense. This is a
// commutative operation.
//
// Versions `a` and `b` are compatible if their left-most nonzero digit is the
// same.
fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
if a.major!= b.major { return false }
if a.major!= 0 { return true }
if a.minor!= b.minor { return false }
if a.minor!= 0 { return true }
a.patch == b.patch
}
fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary,
method: Method)
-> CargoResult<HashMap<&'a str,
(&'a Dependency, Vec<String>)>> {
let dev_deps = match method {
Method::Everything => true,
Method::Required { dev_deps,.. } => dev_deps,
};
// First, filter by dev-dependencies
let deps = parent.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
// Second, ignoring dependencies that should not be compiled for this platform
let deps = deps.filter(|d| {
match method {
Method::Required{target_platform: Some(ref platform),..} => {
d.is_active_for_platform(platform)
},
_ => true
}
});
let (mut feature_deps, used_features) = try!(build_features(parent, method));
let mut ret = HashMap::new();
// Next, sanitize all requested features by whitelisting all the requested
// features that correspond to optional dependencies
for dep in deps {
// weed out optional dependencies, but not those required
if dep.is_optional() &&!feature_deps.contains_key(dep.name()) {
continue
}
let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
for feature in dep.features().iter() {
base.push(feature.clone());
if feature.contains("/") {
return Err(human(format!("features in dependencies \
cannot enable features in \
other dependencies: `{}`",
feature)));
}
}
ret.insert(dep.name(), (dep, base));
}
// All features can only point to optional dependencies, in which case they
// should have all been weeded out by the above iteration. Any remaining
// features are bugs in that the package does not actually have those
// features.
if feature_deps.len() > 0 {
let unknown = feature_deps.keys().map(|s| &s[..])
.collect::<Vec<&str>>();
if unknown.len() > 0 {
let features = unknown.connect(", ");
return Err(human(format!("Package `{}` does not have these features: \
`{}`", parent.package_id(), features)))
}
}
// Record what list of features is active for this package.
if used_features.len() > 0 {
let pkgid = parent.package_id();
cx.resolve.features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
}
Ok(ret)
}
// Returns a pair of (feature dependencies, all used features)
//
// The feature dependencies map is a mapping of package name to list of features
// enabled. Each package should be enabled, and each package should have the
// specified set of features enabled.
//
// The all used features set is the set of features which this local package had
// enabled, which is later used when compiling to instruct the code what
// features were enabled.
fn build_features(s: &Summary, method: Method)
-> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
let mut deps = HashMap::new();
let mut used = HashSet::new();
let mut visited = HashSet::new();
match method {
Method::Everything => {
for key in s.features().keys() {
| iter | identifier_name |
|
mod.rs | pub fn query(&self, spec: &str) -> CargoResult<&PackageId> {
let spec = try!(PackageIdSpec::parse(spec).chain_error(|| {
human(format!("invalid package id specification: `{}`", spec))
}));
let mut ids = self.iter().filter(|p| spec.matches(*p));
let ret = match ids.next() {
Some(id) => id,
None => return Err(human(format!("package id specification `{}` \
matched no packages", spec))),
};
return match ids.next() {
Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
spec.name(), spec);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, &spec);
Err(human(msg))
}
None => Ok(ret)
};
fn minimize(msg: &mut String,
ids: Vec<&PackageId>,
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids.iter() {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids.iter() {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(),
id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
}
}
}
}
pub fn features(&self, pkg: &PackageId) -> Option<&HashSet<String>> {
self.features.get(pkg)
}
}
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "graph: {:?}\n", self.graph));
try!(write!(fmt, "\nfeatures: {{\n"));
for (pkg, features) in &self.features {
try!(write!(fmt, " {}: {:?}\n", pkg, features));
}
write!(fmt, "}}")
}
}
#[derive(Clone)]
struct Context {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
resolve: Resolve,
visited: Rc<RefCell<HashSet<PackageId>>>,
}
/// Builds the list of all packages required to build the first argument.
pub fn resolve(summary: &Summary, method: Method,
registry: &mut Registry) -> CargoResult<Resolve> {
trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Box::new(Context {
resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
visited: Rc::new(RefCell::new(HashSet::new())),
});
let _p = profile::start(format!("resolving: {}", summary.package_id()));
match try!(activate(cx, registry, &summary, method)) {
Ok(cx) => {
debug!("resolved: {:?}", cx.resolve);
Ok(cx.resolve)
}
Err(e) => Err(e),
}
}
fn activate(mut cx: Box<Context>,
registry: &mut Registry,
parent: &Rc<Summary>,
method: Method)
-> CargoResult<CargoResult<Box<Context>>> {
// Dependency graphs are required to be a DAG, so we keep a set of
// packages we're visiting and bail if we hit a dupe.
let id = parent.package_id();
if!cx.visited.borrow_mut().insert(id.clone()) {
return Err(human(format!("cyclic package dependency: package `{}` \
depends on itself", id)))
}
// If we're already activated, then that was easy!
if flag_activated(&mut *cx, parent, &method) {
cx.visited.borrow_mut().remove(id);
return Ok(Ok(cx))
}
debug!("activating {}", parent.package_id());
// Extracting the platform request.
let platform = match method {
Method::Required{target_platform: platform,..} => platform,
Method::Everything => None,
};
// First, figure out our set of dependencies based on the requsted set of
// features. This also calculates what features we're going to enable for
// our own dependencies.
let deps = try!(resolve_features(&mut cx, parent, method));
// Next, transform all dependencies into a list of possible candidates which
// can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(_dep_name, (dep, features))| {
let mut candidates = try!(registry.query(dep));
// When we attempt versions for a package, we'll want to start at the
// maximum version and work our way down.
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
let candidates = candidates.into_iter().map(Rc::new).collect::<Vec<_>>();
Ok((dep, candidates, features))
}).collect::<CargoResult<Vec<_>>>());
// When we recurse, attempt to resolve dependencies with fewer candidates
// before recursing on dependencies with more candidates. This way if the
// dependency with only one candidate can't be resolved we don't have to do
// a bunch of work before we figure that out.
deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
a.len().cmp(&b.len())
});
// Workaround compilation error: `deps` does not live long enough
let platform = platform.map(|s| &*s);
Ok(match try!(activate_deps(cx, registry, parent, platform, &deps, 0)) {
Ok(cx) => {
cx.visited.borrow_mut().remove(parent.package_id());
Ok(cx)
}
Err(e) => Err(e),
})
}
// Activate this summary by inserting it into our list of known activations.
//
// Returns if this summary with the given method is already activated.
fn flag_activated(cx: &mut Context,
summary: &Rc<Summary>,
method: &Method) -> bool {
let id = summary.package_id();
let key = (id.name().to_string(), id.source_id().clone());
let prev = cx.activations.entry(key).or_insert(Vec::new());
if!prev.iter().any(|c| c == summary) {
cx.resolve.graph.add(id.clone(), &[]);
prev.push(summary.clone());
return false
}
debug!("checking if {} is already activated", summary.package_id());
let (features, use_default) = match *method {
Method::Required { features, uses_default_features,.. } => {
(features, uses_default_features)
}
Method::Everything => return false,
};
let has_default_feature = summary.features().contains_key("default");
match cx.resolve.features(id) {
Some(prev) => {
features.iter().all(|f| prev.contains(f)) &&
(!use_default || prev.contains("default") || !has_default_feature)
}
None => features.len() == 0 && (!use_default || !has_default_feature)
}
}
fn activate_deps<'a>(cx: Box<Context>,
registry: &mut Registry,
parent: &Summary,
platform: Option<&'a str>,
deps: &'a [(&Dependency, Vec<Rc<Summary>>, Vec<String>)],
cur: usize) -> CargoResult<CargoResult<Box<Context>>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
let method = Method::Required {
dev_deps: false,
features: &features,
uses_default_features: dep.uses_default_features(),
target_platform: platform,
};
let key = (dep.name().to_string(), dep.source_id().clone());
let prev_active = cx.activations.get(&key)
.map(|v| &v[..]).unwrap_or(&[]);
trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
candidates.len());
trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
dep.name(), prev_active.len());
// Filter the set of candidates based on the previously activated
// versions for this dependency. We can actually use a version if it
// precisely matches an activated version or if it is otherwise
// incompatible with all other activated versions. Note that we define
// "compatible" here in terms of the semver sense where if the left-most
// nonzero digit is the same they're considered compatible.
let my_candidates = candidates.iter().filter(|&b| {
prev_active.iter().any(|a| a == b) ||
prev_active.iter().all(|a| {
!compatible(a.version(), b.version())
})
});
// Alright, for each candidate that's gotten this far, it meets the
// following requirements:
//
// 1. The version matches the dependency requirement listed for this
// package
// 2. There are no activated versions for this package which are
// semver-compatible, or there's an activated version which is
// precisely equal to `candidate`.
//
// This means that we're going to attempt to activate each candidate in
// turn. We could possibly fail to activate each candidate, so we try
// each one in turn.
let mut last_err = None;
for candidate in my_candidates {
trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
candidate.version());
let mut my_cx = cx.clone();
my_cx.resolve.graph.link(parent.package_id().clone(),
candidate.package_id().clone());
// If we hit an intransitive dependency then clear out the visitation
// list as we can't induce a cycle through transitive dependencies.
if !dep.is_transitive() {
my_cx.visited.borrow_mut().clear();
}
let my_cx = match try!(activate(my_cx, registry, candidate, method)) {
Ok(cx) => cx,
Err(e) => { last_err = Some(e); continue }
};
match try!(activate_deps(my_cx, registry, parent, platform, deps,
cur + 1)) {
Ok(cx) => return Ok(Ok(cx)),
Err(e) => { last_err = Some(e); }
}
}
trace!("{}[{}]>{} -- {:?}", parent.name(), cur, dep.name(),
last_err);
// Oh well, we couldn't activate any of the candidates, so we just can't
// activate this dependency at all
Ok(activation_error(&cx, registry, last_err, parent, dep, prev_active,
&candidates))
}
fn activation_error(cx: &Context,
registry: &mut Registry,
err: Option<Box<CargoError>>,
parent: &Summary,
dep: &Dependency,
prev_active: &[Rc<Summary>],
candidates: &[Rc<Summary>]) -> CargoResult<Box<Context>> {
match err {
Some(e) => return Err(e),
None => {}
}
if candidates.len() > 0 {
let mut msg = format!("failed to select a version for `{}` \
(required by `{}`):\n\
all possible versions conflict with \
previously selected versions of `{}`",
dep.name(), parent.name(),
dep.name());
'outer: for v in prev_active.iter() {
for node in cx.resolve.graph.iter() {
let edges = match cx.resolve.graph.edges(node) {
Some(edges) => edges,
None => continue,
};
for edge in edges {
if edge != v.package_id() { continue }
msg.push_str(&format!("\n version {} in use by {}",
v.version(), edge));
continue 'outer;
}
}
msg.push_str(&format!("\n version {} in use by??",
v.version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
.map(|v| v.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.connect(", ")));
return Err(human(msg))
}
// Once we're all the way down here, we're definitely lost in the
// weeds! We didn't actually use any candidates above, so we need to
// give an error message that nothing was found.
//
// Note that we re-query the registry with a new dependency that
// allows any version so we can give some nicer error reporting
// which indicates a few versions that were actually found.
let msg = format!("no matching package named `{}` found \
(required by `{}`)\n\
location searched: {}\n\
version required: {}",
dep.name(), parent.name(),
dep.source_id(),
dep.version_req());
let mut msg = msg;
let all_req = semver::VersionReq::parse("*").unwrap();
let new_dep = dep.clone().set_version_req(all_req);
let mut candidates = try!(registry.query(&new_dep));
candidates.sort_by(|a, b| {
b.version().cmp(a.version())
});
if candidates.len() > 0 {
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i != 0 { msg.push_str(", "); }
msg.push_str(&c.version().to_string());
}
if candidates.len() > 3 {
msg.push_str(",...");
}
}
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
if dep.source_id().is_path() &&
dep.version_req().to_string().starts_with("=") &&
candidates.len() > 0 {
msg.push_str("\nconsider running `cargo update` to update \
a path dependency's locked version");
}
Err(human(msg))
}
// Returns whether `a` and `b` are compatible in the semver sense. This is a
// commutative operation.
//
// Versions `a` and `b` are compatible if their left-most nonzero digit is the
// same.
fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
if a.major != b.major { return false }
if a.major != 0 { return true }
if a.minor != b.minor { return false }
if a.minor != 0 { return true }
a.patch == b.patch
}
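// Illustrative sketch (not from the original source): concrete examples of
// what `compatible` considers semver-compatible. With a leading zero, only
// the next nonzero position decides compatibility.
#[cfg(test)]
mod compatible_examples {
    use super::compatible;
    use semver::Version;

    fn v(s: &str) -> Version {
        Version::parse(s).unwrap()
    }

    #[test]
    fn leftmost_nonzero_digit_decides() {
        assert!(compatible(&v("1.2.0"), &v("1.9.3")));  // same nonzero major
        assert!(!compatible(&v("1.0.0"), &v("2.0.0"))); // different major
        assert!(compatible(&v("0.3.1"), &v("0.3.7")));  // 0.x: minor decides
        assert!(!compatible(&v("0.3.1"), &v("0.4.0")));
        assert!(!compatible(&v("0.0.1"), &v("0.0.2"))); // 0.0.x: patch decides
    }
}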
fn resolve_features<'a>(cx: &mut Context, parent: &'a Summary,
method: Method)
-> CargoResult<HashMap<&'a str,
(&'a Dependency, Vec<String>)>> {
let dev_deps = match method {
Method::Everything => true,
Method::Required { dev_deps, .. } => dev_deps,
};
// First, filter by dev-dependencies
let deps = parent.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
// Second, ignore dependencies that should not be compiled for this platform
let deps = deps.filter(|d| {
match method {
Method::Required { target_platform: Some(ref platform), .. } => {
d.is_active_for_platform(platform)
},
_ => true
}
});
let (mut feature_deps, used_features) = try!(build_features(parent, method));
let mut ret = HashMap::new();
| continue
}
let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
for feature in dep.features().iter() {
base.push(feature.clone());
if feature.contains("/") {
return Err(human(format!("features in dependencies \
cannot enable features in \
other dependencies: `{}`",
feature)));
}
}
ret.insert(dep.name(), (dep, base));
}
// All features can only point to optional dependencies, in which case they
// should have all been weeded out by the above iteration. Any remaining
// features are bugs in that the package does not actually have those
// features.
if feature_deps.len() > 0 {
let unknown = feature_deps.keys().map(|s| &s[..])
.collect::<Vec<&str>>();
if unknown.len() > 0 {
let features = unknown.connect(", ");
return Err(human(format!("Package `{}` does not have these features: \
`{}`", parent.package_id(), features)))
}
}
// Record what list of features is active for this package.
if used_features.len() > 0 {
let pkgid = parent.package_id();
cx.resolve.features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
}
Ok(ret)
}
// Returns a pair of (feature dependencies, all used features)
//
// The feature dependencies map is a mapping of package name to list of features
// enabled. Each package should be enabled, and each package should have the
// specified set of features enabled.
//
// The all used features set is the set of features which this local package had
// enabled, which is later used when compiling to instruct the code what
// features were enabled.
fn build_features(s: &Summary, method: Method)
-> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
let mut deps = HashMap::new();
let mut used = HashSet::new();
let mut visited = HashSet::new();
match method {
Method::Everything => {
for key in s.features().keys() {
try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
try!(add_feature(s, dep.name(), &mut deps, & | // Next, sanitize all requested features by whitelisting all the requested
// features that correspond to optional dependencies
for dep in deps {
// weed out optional dependencies, but not those required
if dep.is_optional() && !feature_deps.contains_key(dep.name()) { | random_line_split |
update.rs | //! Types used to describe updates on graphs.
use std::convert::TryInto;
use std::fs::File;
use std::sync::Mutex;
use crate::errors::{GraphAnnisCoreError, Result};
use crate::serializer::KeySerializer;
use bincode::Options;
use serde::de::Error as DeserializeError;
use serde::de::{MapAccess, Visitor};
use serde::ser::{Error as SerializeError, SerializeMap};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sstable::{SSIterator, Table, TableBuilder, TableIterator};
use tempfile::NamedTempFile;
/// Describes a single update on the graph.
#[derive(Serialize, Deserialize, Clone, Debug, MallocSizeOf)]
pub enum UpdateEvent {
/// Add a node with a name and type.
AddNode {
node_name: String,
node_type: String,
},
/// Delete a node given by the name.
DeleteNode { node_name: String },
/// Add a label to the node given by its name.
AddNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label of a node, given the node's name and the qualified label name.
DeleteNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
},
/// Add an edge between two nodes given by their name.
AddEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Delete an existing edge between two nodes given by their name.
DeleteEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Add a label to an edge between two nodes.
AddEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label from an edge between two nodes.
DeleteEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
},
}
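// Illustrative sketch (not part of the original file): update events are plain
// data, so consumers typically destructure them with a `match` when applying
// them to a graph. The helper name below is hypothetical.
#[allow(dead_code)]
fn describe_event(event: &UpdateEvent) -> String {
    match event {
        UpdateEvent::AddNode { node_name, node_type } => {
            format!("add node `{}` of type `{}`", node_name, node_type)
        }
        UpdateEvent::DeleteNode { node_name } => format!("delete node `{}`", node_name),
        _ => "another kind of update".to_string(),
    }
}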
enum ChangeSet {
InProgress {
table_builder: Box<TableBuilder<File>>,
outfile: NamedTempFile,
},
Finished {
table: Table,
},
}
/// A list of changes to apply to a graph.
pub struct GraphUpdate {
changesets: Mutex<Vec<ChangeSet>>,
event_counter: u64,
serialization: bincode::config::DefaultOptions,
}
impl Default for GraphUpdate {
fn default() -> Self {
GraphUpdate::new()
}
}
impl GraphUpdate {
/// Create a new empty list of updates.
pub fn new() -> GraphUpdate {
GraphUpdate {
event_counter: 0,
changesets: Mutex::new(Vec::new()),
serialization: bincode::options(),
}
}
/// Add the given event to the update list.
pub fn add_event(&mut self, event: UpdateEvent) -> Result<()> {
let new_event_counter = self.event_counter + 1;
let key = new_event_counter.create_key();
let value = self.serialization.serialize(&event)?;
let mut changeset = self.changesets.lock()?;
if let ChangeSet::InProgress { table_builder, .. } =
current_inprogress_changeset(&mut changeset)?
{
table_builder.add(&key, &value)?;
self.event_counter = new_event_counter;
}
Ok(())
}
/// Get all changes
pub fn iter(&self) -> Result<GraphUpdateIterator> {
let it = GraphUpdateIterator::new(self)?;
Ok(it)
}
/// Returns `true` if the update list is empty.
pub fn is_empty(&self) -> Result<bool> {
Ok(self.event_counter == 0)
}
/// Returns the number of updates.
pub fn len(&self) -> Result<usize> {
let result = self.event_counter.try_into()?;
Ok(result)
}
}
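// Usage sketch (not part of the original file; the node names are made up):
// build an update list and read it back. `iter()` yields `(event id, event)`
// pairs in insertion order.
#[allow(dead_code)]
fn graph_update_example() -> Result<()> {
    let mut updates = GraphUpdate::new();
    updates.add_event(UpdateEvent::AddNode {
        node_name: "corpus/doc#t1".to_string(),
        node_type: "node".to_string(),
    })?;
    updates.add_event(UpdateEvent::AddNodeLabel {
        node_name: "corpus/doc#t1".to_string(),
        anno_ns: "annis".to_string(),
        anno_name: "tok".to_string(),
        anno_value: "hello".to_string(),
    })?;
    for entry in updates.iter()? {
        let (id, event) = entry?;
        println!("event {}: {:?}", id, event);
    }
    Ok(())
}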
fn finish_all_changesets(changesets: &mut Vec<ChangeSet>) -> Result<()> {
// Remove all changesets from the vector and finish them
let finished: Result<Vec<ChangeSet>> = changesets
.drain(..)
.map(|c| match c {
ChangeSet::InProgress {
table_builder,
outfile,
} => {
table_builder.finish()?;
// Re-open as table
let file = outfile.reopen()?;
let size = file.metadata()?.len();
let table = Table::new(sstable::Options::default(), Box::new(file), size as usize)?;
Ok(ChangeSet::Finished { table })
}
ChangeSet::Finished { table } => Ok(ChangeSet::Finished { table }),
})
.collect();
// Re-add the finished changesets
changesets.extend(finished?);
Ok(())
}
fn current_inprogress_changeset(changesets: &mut Vec<ChangeSet>) -> Result<&mut ChangeSet> {
let needs_new_changeset = if let Some(c) = changesets.last_mut() {
match c {
ChangeSet::InProgress { .. } => false,
ChangeSet::Finished { .. } => true,
}
} else {
true
};
if needs_new_changeset {
// Create a new changeset
let outfile = NamedTempFile::new()?;
let table_builder = TableBuilder::new(sstable::Options::default(), outfile.reopen()?);
let c = ChangeSet::InProgress {
table_builder: Box::new(table_builder),
outfile,
};
changesets.push(c);
}
// Get the last changeset, which must be in the InProgress state
changesets
.last_mut()
.ok_or(GraphAnnisCoreError::GraphUpdatePersistanceFileMissing)
}
pub struct GraphUpdateIterator {
iterators: Vec<TableIterator>,
size_hint: u64,
serialization: bincode::config::DefaultOptions,
}
impl GraphUpdateIterator {
fn new(g: &GraphUpdate) -> Result<GraphUpdateIterator> {
let mut changesets = g.changesets.lock()?;
finish_all_changesets(&mut changesets)?;
let iterators: Vec<_> = changesets
.iter()
.filter_map(|c| match c {
ChangeSet::InProgress { .. } => None,
ChangeSet::Finished { table } => {
let mut it = table.iter();
it.seek_to_first();
Some(it)
}
})
.collect();
Ok(GraphUpdateIterator {
size_hint: g.event_counter,
iterators,
serialization: g.serialization,
})
}
}
impl std::iter::Iterator for GraphUpdateIterator {
type Item = Result<(u64, UpdateEvent)>;
fn next(&mut self) -> Option<Self::Item> {
// Remove all empty table iterators.
self.iterators.retain(|it| it.valid());
if let Some(it) = self.iterators.first_mut() {
// Get the current values
if let Some((key, value)) = sstable::current_key_val(it) {
// Create the actual types
let id = match u64::parse_key(&key) {
Ok(id) => id,
Err(e) => return Some(Err(e.into())),
};
let event: UpdateEvent = match self.serialization.deserialize(&value) {
Ok(event) => event,
Err(e) => return Some(Err(e.into())),
};
// Advance for next iteration
it.advance();
return Some(Ok((id, event)));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
if let Ok(s) = self.size_hint.try_into() | else {
(0, None)
}
}
}
impl Serialize for GraphUpdate {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
let iter = self.iter().map_err(S::Error::custom)?;
let number_of_updates = self.len().map_err(S::Error::custom)?;
let mut map_serializer = serializer.serialize_map(Some(number_of_updates))?;
for entry in iter {
let (key, value) = entry.map_err(S::Error::custom)?;
map_serializer
.serialize_entry(&key, &value)
.map_err(S::Error::custom)?;
}
map_serializer.end()
}
}
struct GraphUpdateVisitor {}
impl<'de> Visitor<'de> for GraphUpdateVisitor {
type Value = GraphUpdate;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a list of graph updates")
}
fn visit_map<M>(self, mut access: M) -> std::result::Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let serialization = bincode::options();
let outfile = NamedTempFile::new().map_err(M::Error::custom)?;
let mut table_builder = TableBuilder::new(
sstable::Options::default(),
outfile.reopen().map_err(M::Error::custom)?,
);
let mut event_counter = 0;
while let Some((id, event)) = access
.next_entry::<u64, GraphUpdate>()
.map_err(M::Error::custom)?
{
event_counter = id;
let key = id.create_key();
let value = serialization.serialize(&event).map_err(M::Error::custom)?;
table_builder.add(&key, &value).map_err(M::Error::custom)?
}
let c = ChangeSet::InProgress {
outfile,
table_builder: Box::new(table_builder),
};
let mut changesets = vec![c];
finish_all_changesets(&mut changesets).map_err(M::Error::custom)?;
let g = GraphUpdate {
changesets: Mutex::new(changesets),
event_counter,
serialization,
};
Ok(g)
}
}
impl<'de> Deserialize<'de> for GraphUpdate {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_map(GraphUpdateVisitor {})
}
}
| {
(s, Some(s))
} | conditional_block |
update.rs | //! Types used to describe updates on graphs.
use std::convert::TryInto;
use std::fs::File;
use std::sync::Mutex;
use crate::errors::{GraphAnnisCoreError, Result};
use crate::serializer::KeySerializer;
use bincode::Options;
use serde::de::Error as DeserializeError;
use serde::de::{MapAccess, Visitor};
use serde::ser::{Error as SerializeError, SerializeMap};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sstable::{SSIterator, Table, TableBuilder, TableIterator};
use tempfile::NamedTempFile;
/// Describes a single update on the graph.
#[derive(Serialize, Deserialize, Clone, Debug, MallocSizeOf)]
pub enum UpdateEvent {
/// Add a node with a name and type.
AddNode {
node_name: String,
node_type: String,
},
/// Delete a node given by the name.
DeleteNode { node_name: String },
/// Add a label to the node given by its name.
AddNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label of a node, given the node's name and the qualified label name.
DeleteNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
},
/// Add an edge between two nodes given by their name.
AddEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Delete an existing edge between two nodes given by their name.
DeleteEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Add a label to an edge between two nodes.
AddEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label from an edge between two nodes.
DeleteEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
},
}
enum ChangeSet {
InProgress {
table_builder: Box<TableBuilder<File>>,
outfile: NamedTempFile,
},
Finished {
table: Table,
},
}
/// A list of changes to apply to a graph.
pub struct GraphUpdate {
changesets: Mutex<Vec<ChangeSet>>,
event_counter: u64,
serialization: bincode::config::DefaultOptions,
}
impl Default for GraphUpdate {
fn default() -> Self {
GraphUpdate::new()
}
}
impl GraphUpdate {
/// Create a new empty list of updates.
pub fn new() -> GraphUpdate {
GraphUpdate {
event_counter: 0,
changesets: Mutex::new(Vec::new()),
serialization: bincode::options(),
}
}
/// Add the given event to the update list.
pub fn add_event(&mut self, event: UpdateEvent) -> Result<()> {
let new_event_counter = self.event_counter + 1;
let key = new_event_counter.create_key();
let value = self.serialization.serialize(&event)?;
let mut changeset = self.changesets.lock()?;
if let ChangeSet::InProgress { table_builder, .. } =
current_inprogress_changeset(&mut changeset)?
{
table_builder.add(&key, &value)?;
self.event_counter = new_event_counter;
}
Ok(())
}
/// Get all changes
pub fn iter(&self) -> Result<GraphUpdateIterator> {
let it = GraphUpdateIterator::new(self)?;
Ok(it)
}
/// Returns `true` if the update list is empty.
pub fn is_empty(&self) -> Result<bool> {
Ok(self.event_counter == 0)
}
/// Returns the number of updates.
pub fn len(&self) -> Result<usize> {
let result = self.event_counter.try_into()?;
Ok(result)
}
}
fn finish_all_changesets(changesets: &mut Vec<ChangeSet>) -> Result<()> {
// Remove all changesets from the vector and finish them
let finished: Result<Vec<ChangeSet>> = changesets
.drain(..)
.map(|c| match c {
ChangeSet::InProgress {
table_builder,
outfile,
} => {
table_builder.finish()?;
// Re-open as table
let file = outfile.reopen()?;
let size = file.metadata()?.len();
let table = Table::new(sstable::Options::default(), Box::new(file), size as usize)?;
Ok(ChangeSet::Finished { table })
}
ChangeSet::Finished { table } => Ok(ChangeSet::Finished { table }),
})
.collect();
// Re-add the finished changesets
changesets.extend(finished?);
Ok(())
}
fn current_inprogress_changeset(changesets: &mut Vec<ChangeSet>) -> Result<&mut ChangeSet> {
let needs_new_changeset = if let Some(c) = changesets.last_mut() {
match c {
ChangeSet::InProgress { .. } => false,
ChangeSet::Finished { .. } => true,
}
} else {
true
};
if needs_new_changeset {
// Create a new changeset
let outfile = NamedTempFile::new()?;
let table_builder = TableBuilder::new(sstable::Options::default(), outfile.reopen()?);
let c = ChangeSet::InProgress {
table_builder: Box::new(table_builder),
outfile,
};
changesets.push(c);
}
// Get the last changeset, which must be in the InProgress state
changesets
.last_mut()
.ok_or(GraphAnnisCoreError::GraphUpdatePersistanceFileMissing)
}
pub struct GraphUpdateIterator {
iterators: Vec<TableIterator>,
size_hint: u64,
serialization: bincode::config::DefaultOptions,
}
impl GraphUpdateIterator {
fn new(g: &GraphUpdate) -> Result<GraphUpdateIterator> {
let mut changesets = g.changesets.lock()?;
finish_all_changesets(&mut changesets)?;
let iterators: Vec<_> = changesets
.iter()
.filter_map(|c| match c {
ChangeSet::InProgress { .. } => None,
ChangeSet::Finished { table } => {
let mut it = table.iter();
it.seek_to_first();
Some(it)
}
})
.collect();
Ok(GraphUpdateIterator {
size_hint: g.event_counter,
iterators,
serialization: g.serialization,
})
}
}
impl std::iter::Iterator for GraphUpdateIterator {
type Item = Result<(u64, UpdateEvent)>;
fn next(&mut self) -> Option<Self::Item> {
// Remove all empty table iterators.
self.iterators.retain(|it| it.valid());
if let Some(it) = self.iterators.first_mut() {
// Get the current values
if let Some((key, value)) = sstable::current_key_val(it) {
// Create the actual types
let id = match u64::parse_key(&key) {
Ok(id) => id,
Err(e) => return Some(Err(e.into())),
};
let event: UpdateEvent = match self.serialization.deserialize(&value) {
Ok(event) => event,
Err(e) => return Some(Err(e.into())),
};
// Advance for next iteration
it.advance();
return Some(Ok((id, event)));
}
}
None
}
| if let Ok(s) = self.size_hint.try_into() {
(s, Some(s))
} else {
(0, None)
}
}
}
impl Serialize for GraphUpdate {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
let iter = self.iter().map_err(S::Error::custom)?;
let number_of_updates = self.len().map_err(S::Error::custom)?;
let mut map_serializer = serializer.serialize_map(Some(number_of_updates))?;
for entry in iter {
let (key, value) = entry.map_err(S::Error::custom)?;
map_serializer
.serialize_entry(&key, &value)
.map_err(S::Error::custom)?;
}
map_serializer.end()
}
}
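// Round-trip sketch (not part of the original file): since `GraphUpdate`
// implements both `Serialize` and `Deserialize`, an update list can travel
// through any serde format. `serde_json` is an assumed extra dependency,
// used purely for illustration.
#[allow(dead_code)]
fn serde_round_trip(updates: &GraphUpdate) -> serde_json::Result<GraphUpdate> {
    let as_json = serde_json::to_string(updates)?;
    serde_json::from_str(&as_json)
}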
struct GraphUpdateVisitor {}
impl<'de> Visitor<'de> for GraphUpdateVisitor {
type Value = GraphUpdate;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a list of graph updates")
}
fn visit_map<M>(self, mut access: M) -> std::result::Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let serialization = bincode::options();
let outfile = NamedTempFile::new().map_err(M::Error::custom)?;
let mut table_builder = TableBuilder::new(
sstable::Options::default(),
outfile.reopen().map_err(M::Error::custom)?,
);
let mut event_counter = 0;
while let Some((id, event)) = access
.next_entry::<u64, GraphUpdate>()
.map_err(M::Error::custom)?
{
event_counter = id;
let key = id.create_key();
let value = serialization.serialize(&event).map_err(M::Error::custom)?;
table_builder.add(&key, &value).map_err(M::Error::custom)?
}
let c = ChangeSet::InProgress {
outfile,
table_builder: Box::new(table_builder),
};
let mut changesets = vec![c];
finish_all_changesets(&mut changesets).map_err(M::Error::custom)?;
let g = GraphUpdate {
changesets: Mutex::new(changesets),
event_counter,
serialization,
};
Ok(g)
}
}
impl<'de> Deserialize<'de> for GraphUpdate {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_map(GraphUpdateVisitor {})
}
} | fn size_hint(&self) -> (usize, Option<usize>) { | random_line_split |
update.rs | //! Types used to describe updates on graphs.
use std::convert::TryInto;
use std::fs::File;
use std::sync::Mutex;
use crate::errors::{GraphAnnisCoreError, Result};
use crate::serializer::KeySerializer;
use bincode::Options;
use serde::de::Error as DeserializeError;
use serde::de::{MapAccess, Visitor};
use serde::ser::{Error as SerializeError, SerializeMap};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sstable::{SSIterator, Table, TableBuilder, TableIterator};
use tempfile::NamedTempFile;
/// Describes a single update on the graph.
#[derive(Serialize, Deserialize, Clone, Debug, MallocSizeOf)]
pub enum UpdateEvent {
/// Add a node with a name and type.
AddNode {
node_name: String,
node_type: String,
},
/// Delete a node given by the name.
DeleteNode { node_name: String },
/// Add a label to the node given by its name.
AddNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label of a node, given the node's name and the qualified label name.
DeleteNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
},
/// Add an edge between two nodes given by their name.
AddEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Delete an existing edge between two nodes given by their name.
DeleteEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Add a label to an edge between two nodes.
AddEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label from an edge between two nodes.
DeleteEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
},
}
enum | {
InProgress {
table_builder: Box<TableBuilder<File>>,
outfile: NamedTempFile,
},
Finished {
table: Table,
},
}
/// A list of changes to apply to a graph.
pub struct GraphUpdate {
changesets: Mutex<Vec<ChangeSet>>,
event_counter: u64,
serialization: bincode::config::DefaultOptions,
}
impl Default for GraphUpdate {
fn default() -> Self {
GraphUpdate::new()
}
}
impl GraphUpdate {
/// Create a new empty list of updates.
pub fn new() -> GraphUpdate {
GraphUpdate {
event_counter: 0,
changesets: Mutex::new(Vec::new()),
serialization: bincode::options(),
}
}
/// Add the given event to the update list.
pub fn add_event(&mut self, event: UpdateEvent) -> Result<()> {
let new_event_counter = self.event_counter + 1;
let key = new_event_counter.create_key();
let value = self.serialization.serialize(&event)?;
let mut changeset = self.changesets.lock()?;
if let ChangeSet::InProgress { table_builder, .. } =
current_inprogress_changeset(&mut changeset)?
{
table_builder.add(&key, &value)?;
self.event_counter = new_event_counter;
}
Ok(())
}
/// Get all changes
pub fn iter(&self) -> Result<GraphUpdateIterator> {
let it = GraphUpdateIterator::new(self)?;
Ok(it)
}
/// Returns `true` if the update list is empty.
pub fn is_empty(&self) -> Result<bool> {
Ok(self.event_counter == 0)
}
/// Returns the number of updates.
pub fn len(&self) -> Result<usize> {
let result = self.event_counter.try_into()?;
Ok(result)
}
}
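// Sketch of the ordering invariant the changeset tables rely on (not in the
// original file): `KeySerializer::create_key` is assumed here to produce
// order-preserving (big-endian style) byte keys, so iterating an sstable
// yields events in insertion order even though keys are compared byte-wise.
#[allow(dead_code)]
fn key_order_holds() {
    let a = 1u64.create_key();
    let b = 2u64.create_key();
    let c = 256u64.create_key();
    assert!(a < b && b < c, "byte-wise key order must follow numeric order");
}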
fn finish_all_changesets(changesets: &mut Vec<ChangeSet>) -> Result<()> {
// Remove all changesets from the vector and finish them
let finished: Result<Vec<ChangeSet>> = changesets
.drain(..)
.map(|c| match c {
ChangeSet::InProgress {
table_builder,
outfile,
} => {
table_builder.finish()?;
// Re-open as table
let file = outfile.reopen()?;
let size = file.metadata()?.len();
let table = Table::new(sstable::Options::default(), Box::new(file), size as usize)?;
Ok(ChangeSet::Finished { table })
}
ChangeSet::Finished { table } => Ok(ChangeSet::Finished { table }),
})
.collect();
// Re-add the finished changesets
changesets.extend(finished?);
Ok(())
}
fn current_inprogress_changeset(changesets: &mut Vec<ChangeSet>) -> Result<&mut ChangeSet> {
let needs_new_changeset = if let Some(c) = changesets.last_mut() {
match c {
ChangeSet::InProgress { .. } => false,
ChangeSet::Finished { .. } => true,
}
} else {
true
};
if needs_new_changeset {
// Create a new changeset
let outfile = NamedTempFile::new()?;
let table_builder = TableBuilder::new(sstable::Options::default(), outfile.reopen()?);
let c = ChangeSet::InProgress {
table_builder: Box::new(table_builder),
outfile,
};
changesets.push(c);
}
// Get the last changeset, which must be in the InProgress state
changesets
.last_mut()
.ok_or(GraphAnnisCoreError::GraphUpdatePersistanceFileMissing)
}
pub struct GraphUpdateIterator {
iterators: Vec<TableIterator>,
size_hint: u64,
serialization: bincode::config::DefaultOptions,
}
impl GraphUpdateIterator {
fn new(g: &GraphUpdate) -> Result<GraphUpdateIterator> {
let mut changesets = g.changesets.lock()?;
finish_all_changesets(&mut changesets)?;
let iterators: Vec<_> = changesets
.iter()
.filter_map(|c| match c {
ChangeSet::InProgress { .. } => None,
ChangeSet::Finished { table } => {
let mut it = table.iter();
it.seek_to_first();
Some(it)
}
})
.collect();
Ok(GraphUpdateIterator {
size_hint: g.event_counter,
iterators,
serialization: g.serialization,
})
}
}
impl std::iter::Iterator for GraphUpdateIterator {
type Item = Result<(u64, UpdateEvent)>;
fn next(&mut self) -> Option<Self::Item> {
// Remove all empty table iterators.
self.iterators.retain(|it| it.valid());
if let Some(it) = self.iterators.first_mut() {
// Get the current values
if let Some((key, value)) = sstable::current_key_val(it) {
// Create the actual types
let id = match u64::parse_key(&key) {
Ok(id) => id,
Err(e) => return Some(Err(e.into())),
};
let event: UpdateEvent = match self.serialization.deserialize(&value) {
Ok(event) => event,
Err(e) => return Some(Err(e.into())),
};
// Advance for next iteration
it.advance();
return Some(Ok((id, event)));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
if let Ok(s) = self.size_hint.try_into() {
(s, Some(s))
} else {
(0, None)
}
}
}
impl Serialize for GraphUpdate {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
let iter = self.iter().map_err(S::Error::custom)?;
let number_of_updates = self.len().map_err(S::Error::custom)?;
let mut map_serializer = serializer.serialize_map(Some(number_of_updates))?;
for entry in iter {
let (key, value) = entry.map_err(S::Error::custom)?;
map_serializer
.serialize_entry(&key, &value)
.map_err(S::Error::custom)?;
}
map_serializer.end()
}
}
struct GraphUpdateVisitor {}
impl<'de> Visitor<'de> for GraphUpdateVisitor {
type Value = GraphUpdate;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a list of graph updates")
}
fn visit_map<M>(self, mut access: M) -> std::result::Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let serialization = bincode::options();
let outfile = NamedTempFile::new().map_err(M::Error::custom)?;
let mut table_builder = TableBuilder::new(
sstable::Options::default(),
outfile.reopen().map_err(M::Error::custom)?,
);
let mut event_counter = 0;
while let Some((id, event)) = access
.next_entry::<u64, GraphUpdate>()
.map_err(M::Error::custom)?
{
event_counter = id;
let key = id.create_key();
let value = serialization.serialize(&event).map_err(M::Error::custom)?;
table_builder.add(&key, &value).map_err(M::Error::custom)?
}
let c = ChangeSet::InProgress {
outfile,
table_builder: Box::new(table_builder),
};
let mut changesets = vec![c];
finish_all_changesets(&mut changesets).map_err(M::Error::custom)?;
let g = GraphUpdate {
changesets: Mutex::new(changesets),
event_counter,
serialization,
};
Ok(g)
}
}
impl<'de> Deserialize<'de> for GraphUpdate {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_map(GraphUpdateVisitor {})
}
}
| ChangeSet | identifier_name |
update.rs | //! Types used to describe updates on graphs.
use std::convert::TryInto;
use std::fs::File;
use std::sync::Mutex;
use crate::errors::{GraphAnnisCoreError, Result};
use crate::serializer::KeySerializer;
use bincode::Options;
use serde::de::Error as DeserializeError;
use serde::de::{MapAccess, Visitor};
use serde::ser::{Error as SerializeError, SerializeMap};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sstable::{SSIterator, Table, TableBuilder, TableIterator};
use tempfile::NamedTempFile;
/// Describes a single update on the graph.
#[derive(Serialize, Deserialize, Clone, Debug, MallocSizeOf)]
pub enum UpdateEvent {
/// Add a node with a name and type.
AddNode {
node_name: String,
node_type: String,
},
/// Delete a node given by the name.
DeleteNode { node_name: String },
/// Add a label to the node given by its name.
AddNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label of a node, given the node's name and the qualified label name.
DeleteNodeLabel {
node_name: String,
anno_ns: String,
anno_name: String,
},
/// Add an edge between two nodes given by their name.
AddEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Delete an existing edge between two nodes given by their name.
DeleteEdge {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
},
/// Add a label to an edge between two nodes.
AddEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
anno_value: String,
},
/// Delete a label from an edge between two nodes.
DeleteEdgeLabel {
source_node: String,
target_node: String,
layer: String,
component_type: String,
component_name: String,
anno_ns: String,
anno_name: String,
},
}
enum ChangeSet {
InProgress {
table_builder: Box<TableBuilder<File>>,
outfile: NamedTempFile,
},
Finished {
table: Table,
},
}
/// A list of changes to apply to a graph.
pub struct GraphUpdate {
changesets: Mutex<Vec<ChangeSet>>,
event_counter: u64,
serialization: bincode::config::DefaultOptions,
}
impl Default for GraphUpdate {
fn default() -> Self {
GraphUpdate::new()
}
}
impl GraphUpdate {
/// Create a new empty list of updates.
pub fn new() -> GraphUpdate {
GraphUpdate {
event_counter: 0,
changesets: Mutex::new(Vec::new()),
serialization: bincode::options(),
}
}
/// Add the given event to the update list.
pub fn add_event(&mut self, event: UpdateEvent) -> Result<()> {
let new_event_counter = self.event_counter + 1;
let key = new_event_counter.create_key();
let value = self.serialization.serialize(&event)?;
let mut changeset = self.changesets.lock()?;
if let ChangeSet::InProgress { table_builder, .. } =
current_inprogress_changeset(&mut changeset)?
{
table_builder.add(&key, &value)?;
self.event_counter = new_event_counter;
}
Ok(())
}
/// Get all changes
pub fn iter(&self) -> Result<GraphUpdateIterator> {
let it = GraphUpdateIterator::new(self)?;
Ok(it)
}
/// Returns `true` if the update list is empty.
pub fn is_empty(&self) -> Result<bool> {
Ok(self.event_counter == 0)
}
/// Returns the number of updates.
pub fn len(&self) -> Result<usize> {
let result = self.event_counter.try_into()?;
Ok(result)
}
}
fn finish_all_changesets(changesets: &mut Vec<ChangeSet>) -> Result<()> {
// Remove all changesets from the vector and finish them
let finished: Result<Vec<ChangeSet>> = changesets
.drain(..)
.map(|c| match c {
ChangeSet::InProgress {
table_builder,
outfile,
} => {
table_builder.finish()?;
// Re-open as table
let file = outfile.reopen()?;
let size = file.metadata()?.len();
let table = Table::new(sstable::Options::default(), Box::new(file), size as usize)?;
Ok(ChangeSet::Finished { table })
}
ChangeSet::Finished { table } => Ok(ChangeSet::Finished { table }),
})
.collect();
// Re-add the finished changesets
changesets.extend(finished?);
Ok(())
}
fn current_inprogress_changeset(changesets: &mut Vec<ChangeSet>) -> Result<&mut ChangeSet> {
let needs_new_changeset = if let Some(c) = changesets.last_mut() {
match c {
ChangeSet::InProgress { .. } => false,
ChangeSet::Finished { .. } => true,
}
} else {
true
};
if needs_new_changeset {
// Create a new changeset
let outfile = NamedTempFile::new()?;
let table_builder = TableBuilder::new(sstable::Options::default(), outfile.reopen()?);
let c = ChangeSet::InProgress {
table_builder: Box::new(table_builder),
outfile,
};
changesets.push(c);
}
// Get the last changeset, which must be in the InProgress state
changesets
.last_mut()
.ok_or(GraphAnnisCoreError::GraphUpdatePersistanceFileMissing)
}
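// Lifecycle sketch (not part of the original file): reading the list via
// `iter()` finishes the in-progress changeset, so a later `add_event` call
// transparently opens a fresh one through `current_inprogress_changeset`.
#[allow(dead_code)]
fn changeset_lifecycle() -> Result<()> {
    let mut updates = GraphUpdate::new();
    updates.add_event(UpdateEvent::DeleteNode {
        node_name: "n1".to_string(),
    })?;
    // This finalizes the first changeset...
    let first_pass = updates.iter()?.count();
    // ...and this write goes into a brand-new second changeset.
    updates.add_event(UpdateEvent::DeleteNode {
        node_name: "n2".to_string(),
    })?;
    assert_eq!(first_pass, 1);
    assert_eq!(updates.len()?, 2);
    Ok(())
}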
pub struct GraphUpdateIterator {
iterators: Vec<TableIterator>,
size_hint: u64,
serialization: bincode::config::DefaultOptions,
}
impl GraphUpdateIterator {
fn new(g: &GraphUpdate) -> Result<GraphUpdateIterator> {
let mut changesets = g.changesets.lock()?;
finish_all_changesets(&mut changesets)?;
let iterators: Vec<_> = changesets
.iter()
.filter_map(|c| match c {
ChangeSet::InProgress { .. } => None,
ChangeSet::Finished { table } => {
let mut it = table.iter();
it.seek_to_first();
Some(it)
}
})
.collect();
Ok(GraphUpdateIterator {
size_hint: g.event_counter,
iterators,
serialization: g.serialization,
})
}
}
impl std::iter::Iterator for GraphUpdateIterator {
type Item = Result<(u64, UpdateEvent)>;
fn next(&mut self) -> Option<Self::Item> | }
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
if let Ok(s) = self.size_hint.try_into() {
(s, Some(s))
} else {
(0, None)
}
}
}
impl Serialize for GraphUpdate {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
let iter = self.iter().map_err(S::Error::custom)?;
let number_of_updates = self.len().map_err(S::Error::custom)?;
let mut map_serializer = serializer.serialize_map(Some(number_of_updates))?;
for entry in iter {
let (key, value) = entry.map_err(S::Error::custom)?;
map_serializer
.serialize_entry(&key, &value)
.map_err(S::Error::custom)?;
}
map_serializer.end()
}
}
struct GraphUpdateVisitor {}
impl<'de> Visitor<'de> for GraphUpdateVisitor {
type Value = GraphUpdate;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a list of graph updates")
}
fn visit_map<M>(self, mut access: M) -> std::result::Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
let serialization = bincode::options();
let outfile = NamedTempFile::new().map_err(M::Error::custom)?;
let mut table_builder = TableBuilder::new(
sstable::Options::default(),
outfile.reopen().map_err(M::Error::custom)?,
);
let mut event_counter = 0;
while let Some((id, event)) = access
.next_entry::<u64, GraphUpdate>()
.map_err(M::Error::custom)?
{
event_counter = id;
let key = id.create_key();
let value = serialization.serialize(&event).map_err(M::Error::custom)?;
table_builder.add(&key, &value).map_err(M::Error::custom)?
}
let c = ChangeSet::InProgress {
outfile,
table_builder: Box::new(table_builder),
};
let mut changesets = vec![c];
finish_all_changesets(&mut changesets).map_err(M::Error::custom)?;
let g = GraphUpdate {
changesets: Mutex::new(changesets),
event_counter,
serialization,
};
Ok(g)
}
}
impl<'de> Deserialize<'de> for GraphUpdate {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_map(GraphUpdateVisitor {})
}
}
| {
// Remove all empty table iterators.
self.iterators.retain(|it| it.valid());
if let Some(it) = self.iterators.first_mut() {
// Get the current values
if let Some((key, value)) = sstable::current_key_val(it) {
// Create the actual types
let id = match u64::parse_key(&key) {
Ok(id) => id,
Err(e) => return Some(Err(e.into())),
};
let event: UpdateEvent = match self.serialization.deserialize(&value) {
Ok(event) => event,
Err(e) => return Some(Err(e.into())),
};
// Advance for next iteration
it.advance();
return Some(Ok((id, event))); | identifier_body |
main.rs | #![warn(clippy::all)]
#![forbid(unsafe_code)]
// Import from other crates.
use csv::ByteRecord;
use humansize::{file_size_opts, FileSize};
use lazy_static::lazy_static;
use log::debug;
use regex::bytes::Regex;
use std::{
borrow::Cow,
fs,
io::{self, prelude::*},
path::PathBuf,
process,
};
use structopt::StructOpt;
// Modules defined in separate files.
#[macro_use]
mod errors;
mod uniquifier;
mod util;
// Import from our own crates.
use crate::errors::*;
use crate::uniquifier::Uniquifier;
use crate::util::{now, CharSpecifier};
/// Use reasonably large input and output buffers. This seems to give us a
/// performance boost of around 5-10% compared to the standard 8 KiB buffer used
/// by `csv`.
const BUFFER_SIZE: usize = 256 * 1024;
/// Our command-line arguments.
#[derive(Debug, StructOpt)]
#[structopt(
name = "scrubcsv",
about = "Clean and normalize a CSV file.",
after_help = "Read a CSV file, normalize the \"good\" lines, and print them to standard
output. Discard any lines with the wrong number of columns.
Regular expressions use Rust syntax, as described here:
https://doc.rust-lang.org/regex/regex/index.html#syntax
scrubcsv should work with any ASCII-compatible encoding, but it will not
attempt to transcode.
Exit code:
0 on success
1 on error
2 if more than 10% of rows were bad"
)]
struct | {
/// Input file (uses stdin if omitted).
input: Option<PathBuf>,
/// Character used to separate fields in a row (must be a single ASCII
/// byte, or "tab").
#[structopt(
value_name = "CHAR",
short = "d",
long = "delimiter",
default_value = ","
)]
delimiter: CharSpecifier,
/// Convert values matching NULL_REGEX to an empty string. For a case-insensitive
/// match, use `(?i)`: `--null '(?i)NULL'`.
#[structopt(value_name = "NULL_REGEX", short = "n", long = "null")]
null: Option<String>,
/// Replace LF and CRLF sequences in values with spaces. This should improve
/// compatibility with systems like BigQuery that don't expect newlines
/// inside escaped strings.
#[structopt(long = "replace-newlines")]
replace_newlines: bool,
/// Remove whitespace at beginning and end of each cell.
#[structopt(long = "trim-whitespace")]
trim_whitespace: bool,
/// Make sure column names are unique, and use only lowercase letters, numbers
/// and underscores.
#[structopt(long = "clean-column-names")]
clean_column_names: bool,
/// Drop any rows where the specified column is empty or NULL. Can be passed
/// more than once. Useful for cleaning primary key columns before
/// upserting. Uses the cleaned form of column names.
#[structopt(value_name = "COL", long = "drop-row-if-null")]
drop_row_if_null: Vec<String>,
/// Do not print performance information.
#[structopt(short = "q", long = "quiet")]
quiet: bool,
/// Character used to quote entries. May be set to "none" to ignore all
/// quoting.
#[structopt(value_name = "CHAR", long = "quote", default_value = "\"")]
quote: CharSpecifier,
}
lazy_static! {
/// Either a CRLF newline, a LF newline, or a CR newline. Any of these
/// will break certain CSV parsers, including BigQuery's CSV importer.
static ref NEWLINE_RE: Regex = Regex::new(r#"\n|\r\n?"#)
.expect("regex in source code is unparseable");
}
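// Quick illustration (not in the original source) of what the regex above
// matches: each CRLF, lone CR, or lone LF collapses to a single replacement
// byte when used with `replace_all`, mirroring the slow path further below.
#[allow(dead_code)]
fn newline_re_demo() {
    let cleaned = NEWLINE_RE.replace_all(b"a\r\nb\rc\nd", &b" "[..]);
    assert_eq!(cleaned.as_ref(), b"a b c d");
}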
/// This is a helper function called by our `main` function. Unlike
/// `main`, we return a `Result`, which means that we can use `?` and other
/// standard error-handling machinery.
fn run() -> Result<()> {
// Set up logging.
env_logger::init();
// Parse our command-line arguments using `structopt`.
let opt: Opt = Opt::from_args();
debug!("Options: {:#?}", opt);
// Remember the time we started.
let start_time = now();
// Build a regex containing our `--null` value.
let null_re = if let Some(null_re_str) = opt.null.as_ref() {
// Always match the full CSV value.
let s = format!("^{}$", null_re_str);
let re = Regex::new(&s).context("can't compile regular expression")?;
Some(re)
} else {
None
};
// Fetch our input from either standard input or a file. The only tricky
// detail here is that we use a `Box<dyn Read>` to represent "some object
// implementing `Read`, stored on the heap." This allows us to do runtime
// dispatch (as if Rust were object oriented). But because `csv` wraps a
// `BufReader` around the box, we only do that dispatch once per buffer
// flush, not on every tiny write.
let stdin = io::stdin();
let input: Box<dyn Read> = if let Some(ref path) = opt.input {
Box::new(
fs::File::open(path)
.with_context(|_| format!("cannot open {}", path.display()))?,
)
} else {
Box::new(stdin.lock())
};
// Create our CSV reader.
let mut rdr_builder = csv::ReaderBuilder::new();
// Set a reasonable buffer size.
rdr_builder.buffer_capacity(BUFFER_SIZE);
// We need headers so that we can honor --drop-row-if-null.
rdr_builder.has_headers(true);
// Allow records with the wrong number of columns.
rdr_builder.flexible(true);
// Configure our delimiter.
if let Some(delimiter) = opt.delimiter.char() {
rdr_builder.delimiter(delimiter);
} else {
return Err(format_err!("field delimiter is required"));
}
// Configure our quote character.
if let Some(quote) = opt.quote.char() {
rdr_builder.quote(quote);
} else {
rdr_builder.quoting(false);
}
let mut rdr = rdr_builder.from_reader(input);
// We lock `stdout`, giving us exclusive access. In the past, this has made
// an enormous difference in performance.
let stdout = io::stdout();
let output = stdout.lock();
// Create our CSV writer. Note that we _don't_ allow variable numbers
// of columns, non-standard delimiters, or other nonsense: We want our
// output to be highly normalized.
let mut wtr = csv::WriterBuilder::new()
.buffer_capacity(BUFFER_SIZE)
.from_writer(output);
// Get our header and, if we were asked, make sure all the column names are unique.
let mut hdr = rdr
.byte_headers()
.context("cannot read headers")?
.to_owned();
if opt.clean_column_names {
let mut uniquifier = Uniquifier::default();
let mut new_hdr = ByteRecord::default();
for col in hdr.into_iter() {
// Convert from bytes to UTF-8, make unique (and clean), and convert back to bytes.
let col = String::from_utf8_lossy(col);
let col = uniquifier.unique_id_for(&col)?.to_owned();
new_hdr.push_field(col.as_bytes());
}
hdr = new_hdr;
}
// Write our header to our output.
wtr.write_byte_record(&hdr)
.context("cannot write headers")?;
// Calculate the number of expected columns.
let expected_cols = hdr.len();
// Just in case --drop-row-if-null was passed, precompute which columns are
// required to contain a value.
let required_cols = hdr
.iter()
.map(|name| -> bool {
opt.drop_row_if_null
.iter()
.any(|required_name| required_name.as_bytes() == name)
})
.collect::<Vec<bool>>();
// Keep track of total rows and malformed rows seen. We count the header as
// a row for backwards compatibility.
let mut rows: u64 = 1;
let mut bad_rows: u64 = 0;
// Can we use the fast path and copy the data through unchanged? Or do we
// need to clean up embedded newlines in our data? (These break BigQuery,
// for example.)
let use_fast_path = null_re.is_none()
&& !opt.replace_newlines
&& !opt.trim_whitespace
&& opt.drop_row_if_null.is_empty();
// Iterate over all the rows, checking to make sure they look reasonable.
//
// If we use the lowest-level, zero-copy API for `csv`, we can process about
// 225 MB/s. But it turns out we can't do that, because we need to count
// all the row's fields before deciding whether or not to write it out.
'next_row: for record in rdr.byte_records() {
let record = record.context("cannot read record")?;
// Keep track of how many rows we've seen.
rows += 1;
// Check if we have the right number of columns in this row.
if record.len() != expected_cols {
bad_rows += 1;
debug!(
"row {}: expected {} columns, found {}",
rows,
expected_cols,
record.len(),
);
continue 'next_row;
}
// Decide how to handle this row.
if use_fast_path {
// We don't need to do anything fancy, so just pass it through.
// I'm not sure how much this actually buys us in current Rust
// versions, but it seemed like a good idea at the time.
wtr.write_record(record.into_iter())
.context("cannot write record")?;
} else {
// We need to apply one or more cleanups, so run the slow path.
let cleaned = record.into_iter().map(|mut val: &[u8]| -> Cow<[u8]> {
// Convert values matching `--null` regex to empty strings.
if let Some(ref null_re) = null_re {
if null_re.is_match(val) {
val = &[]
}
}
// Remove whitespace from our cells.
if opt.trim_whitespace {
// We do this manually, because the built-in `trim` only
// works on UTF-8 strings, and we work on any
// "ASCII-compatible" encoding.
let first = val.iter().position(|c| !c.is_ascii_whitespace());
let last = val.iter().rposition(|c| !c.is_ascii_whitespace());
val = match (first, last) {
(Some(first), Some(last)) if first <= last => {
&val[first..=last]
}
(None, None) => &[],
_ => panic!(
"tried to trim {:?}, got impossible indices {:?} {:?}",
val, first, last,
),
};
}
// Fix newlines.
if opt.replace_newlines
&& (val.contains(&b'\n') || val.contains(&b'\r'))
{
NEWLINE_RE.replace_all(val, &b" "[..])
} else {
Cow::Borrowed(val)
}
});
if opt.drop_row_if_null.is_empty() {
// Still somewhat fast!
wtr.write_record(cleaned).context("cannot write record")?;
} else {
// We need to rebuild the record, check for null columns,
// and only output the record if everything's OK.
let row = cleaned.collect::<Vec<Cow<[u8]>>>();
for (value, &is_required_col) in row.iter().zip(required_cols.iter()) {
// If the column is NULL but shouldn't be, bail on this row.
if is_required_col && value.is_empty() {
bad_rows += 1;
debug!("row {}: required column is empty", rows);
continue 'next_row;
}
}
wtr.write_record(row).context("cannot write record")?;
}
}
}
// Flush all our buffers.
wtr.flush().context("error writing records")?;
// Print out some information about our run.
if !opt.quiet {
let elapsed = (now() - start_time).as_seconds_f64();
let bytes_per_second = (rdr.position().byte() as f64 / elapsed) as i64;
eprintln!(
"{} rows ({} bad) in {:.2} seconds, {}/sec",
rows,
bad_rows,
elapsed,
bytes_per_second.file_size(file_size_opts::BINARY)?,
);
}
// If more than 10% of rows are bad, assume something has gone horribly
// wrong.
if bad_rows.checked_mul(10).expect("multiplication overflow") > rows {
eprintln!("Too many rows ({} of {}) were bad", bad_rows, rows);
process::exit(2);
}
Ok(())
}
fn main() {
if let Err(err) = run() {
eprintln!("ERROR: {}", err);
let mut source = err.source();
while let Some(cause) = source {
eprintln!(" caused by: {}", cause);
source = cause.source();
}
process::exit(1);
}
}
| Opt | identifier_name |
main.rs | #![warn(clippy::all)]
#![forbid(unsafe_code)]
// Import from other crates.
use csv::ByteRecord;
use humansize::{file_size_opts, FileSize};
use lazy_static::lazy_static;
use log::debug;
use regex::bytes::Regex;
use std::{
borrow::Cow,
fs,
io::{self, prelude::*},
path::PathBuf,
process,
};
use structopt::StructOpt;
// Modules defined in separate files.
#[macro_use]
mod errors;
mod uniquifier;
mod util;
// Import from our own crates.
use crate::errors::*;
use crate::uniquifier::Uniquifier;
use crate::util::{now, CharSpecifier};
/// Use reasonably large input and output buffers. This seems to give us a
/// performance boost of around 5-10% compared to the standard 8 KiB buffer used
/// by `csv`.
const BUFFER_SIZE: usize = 256 * 1024;
/// Our command-line arguments.
#[derive(Debug, StructOpt)]
#[structopt(
name = "scrubcsv",
about = "Clean and normalize a CSV file.",
after_help = "Read a CSV file, normalize the \"good\" lines, and print them to standard
output. Discard any lines with the wrong number of columns.
Regular expressions use Rust syntax, as described here:
https://doc.rust-lang.org/regex/regex/index.html#syntax
scrubcsv should work with any ASCII-compatible encoding, but it will not
attempt to transcode.
Exit code:
0 on success
1 on error
2 if more than 10% of rows were bad"
)]
struct Opt {
/// Input file (uses stdin if omitted).
input: Option<PathBuf>,
/// Character used to separate fields in a row (must be a single ASCII
/// byte, or "tab").
#[structopt(
value_name = "CHAR",
short = "d",
long = "delimiter",
default_value = ","
)]
delimiter: CharSpecifier,
/// Convert values matching NULL_REGEX to an empty string. For a case-insensitive
/// match, use `(?i)`: `--null '(?i)NULL'`.
#[structopt(value_name = "NULL_REGEX", short = "n", long = "null")]
null: Option<String>,
/// Replace LF and CRLF sequences in values with spaces. This should improve
/// compatibility with systems like BigQuery that don't expect newlines
/// inside escaped strings.
#[structopt(long = "replace-newlines")]
replace_newlines: bool,
/// Remove whitespace at beginning and end of each cell.
#[structopt(long = "trim-whitespace")]
trim_whitespace: bool,
/// Make sure column names are unique, and use only lowercase letters, numbers
/// and underscores.
#[structopt(long = "clean-column-names")]
clean_column_names: bool,
/// Drop any rows where the specified column is empty or NULL. Can be passed
/// more than once. Useful for cleaning primary key columns before
/// upserting. Uses the cleaned form of column names.
#[structopt(value_name = "COL", long = "drop-row-if-null")]
drop_row_if_null: Vec<String>,
/// Do not print performance information.
#[structopt(short = "q", long = "quiet")]
quiet: bool,
/// Character used to quote entries. May be set to "none" to ignore all
/// quoting.
#[structopt(value_name = "CHAR", long = "quote", default_value = "\"")]
quote: CharSpecifier,
}
lazy_static! {
/// Either a CRLF newline, a LF newline, or a CR newline. Any of these
/// will break certain CSV parsers, including BigQuery's CSV importer.
static ref NEWLINE_RE: Regex = Regex::new(r#"\n|\r\n?"#)
.expect("regex in source code is unparseable");
}
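// Standalone version (not in the original source) of the ASCII-aware trim
// that the slow path performs inline further below. It exists because
// `str::trim` only works on UTF-8 strings, while scrubcsv accepts any
// ASCII-compatible encoding.
#[allow(dead_code)]
fn trim_ascii_whitespace(val: &[u8]) -> &[u8] {
    let first = val.iter().position(|c| !c.is_ascii_whitespace());
    let last = val.iter().rposition(|c| !c.is_ascii_whitespace());
    match (first, last) {
        (Some(first), Some(last)) if first <= last => &val[first..=last],
        _ => &[],
    }
}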
/// This is a helper function called by our `main` function. Unlike
/// `main`, we return a `Result`, which means that we can use `?` and other
/// standard error-handling machinery.
fn run() -> Result<()> {
// Set up logging.
env_logger::init();
// Parse our command-line arguments using `structopt`.
let opt: Opt = Opt::from_args();
debug!("Options: {:#?}", opt);
// Remember the time we started.
let start_time = now();
// Build a regex containing our `--null` value.
let null_re = if let Some(null_re_str) = opt.null.as_ref() {
// Always match the full CSV value.
let s = format!("^{}$", null_re_str);
let re = Regex::new(&s).context("can't compile regular expression")?;
Some(re)
} else {
None
};
// Fetch our input from either standard input or a file. The only tricky
// detail here is that we use a `Box<dyn Read>` to represent "some object | // flush, not on every tiny write.
let stdin = io::stdin();
let input: Box<dyn Read> = if let Some(ref path) = opt.input {
Box::new(
fs::File::open(path)
.with_context(|_| format!("cannot open {}", path.display()))?,
)
} else {
Box::new(stdin.lock())
};
// Create our CSV reader.
let mut rdr_builder = csv::ReaderBuilder::new();
// Set a reasonable buffer size.
rdr_builder.buffer_capacity(BUFFER_SIZE);
// We need headers so that we can honor --drop-row-if-null.
rdr_builder.has_headers(true);
// Allow records with the wrong number of columns.
rdr_builder.flexible(true);
// Configure our delimiter.
if let Some(delimiter) = opt.delimiter.char() {
rdr_builder.delimiter(delimiter);
} else {
return Err(format_err!("field delimiter is required"));
}
// Configure our quote character.
if let Some(quote) = opt.quote.char() {
rdr_builder.quote(quote);
} else {
rdr_builder.quoting(false);
}
let mut rdr = rdr_builder.from_reader(input);
// We lock `stdout`, giving us exclusive access. In the past, this has made
// an enormous difference in performance.
let stdout = io::stdout();
let output = stdout.lock();
// Create our CSV writer. Note that we _don't_ allow variable numbers
// of columns, non-standard delimiters, or other nonsense: We want our
// output to be highly normalized.
let mut wtr = csv::WriterBuilder::new()
.buffer_capacity(BUFFER_SIZE)
.from_writer(output);
// Get our header and, if we were asked, make sure all the column names are unique.
let mut hdr = rdr
.byte_headers()
.context("cannot read headers")?
.to_owned();
if opt.clean_column_names {
let mut uniquifier = Uniquifier::default();
let mut new_hdr = ByteRecord::default();
for col in hdr.into_iter() {
// Convert from bytes to UTF-8, make unique (and clean), and convert back to bytes.
let col = String::from_utf8_lossy(col);
let col = uniquifier.unique_id_for(&col)?.to_owned();
new_hdr.push_field(col.as_bytes());
}
hdr = new_hdr;
}
// Write our header to our output.
wtr.write_byte_record(&hdr)
.context("cannot write headers")?;
// Calculate the number of expected columns.
let expected_cols = hdr.len();
// Just in case --drop-row-if-null was passed, precompute which columns are
// required to contain a value.
let required_cols = hdr
.iter()
.map(|name| -> bool {
opt.drop_row_if_null
.iter()
.any(|required_name| required_name.as_bytes() == name)
})
.collect::<Vec<bool>>();
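// For example (hypothetical values): with a cleaned header of `id,name,email`
// and `--drop-row-if-null id --drop-row-if-null email`, `required_cols` comes
// out as `[true, false, true]`, lining up index-for-index with the fields of
// every record read below.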
// Keep track of total rows and malformed rows seen. We count the header as
// a row for backwards compatibility.
let mut rows: u64 = 1;
let mut bad_rows: u64 = 0;
// Can we use the fast path and copy the data through unchanged? Or do we
// need to clean up embedded newlines in our data? (These break BigQuery,
// for example.)
let use_fast_path = null_re.is_none()
&& !opt.replace_newlines
&& !opt.trim_whitespace
&& opt.drop_row_if_null.is_empty();
// Iterate over all the rows, checking to make sure they look reasonable.
//
// If we use the lowest-level, zero-copy API for `csv`, we can process about
// 225 MB/s. But it turns out we can't do that, because we need to count
// all the row's fields before deciding whether or not to write it out.
'next_row: for record in rdr.byte_records() {
let record = record.context("cannot read record")?;
// Keep track of how many rows we've seen.
rows += 1;
// Check if we have the right number of columns in this row.
if record.len() != expected_cols {
bad_rows += 1;
debug!(
"row {}: expected {} columns, found {}",
rows,
expected_cols,
record.len(),
);
continue 'next_row;
}
// Decide how to handle this row.
if use_fast_path {
// We don't need to do anything fancy, so just pass it through.
// I'm not sure how much this actually buys us in current Rust
// versions, but it seemed like a good idea at the time.
wtr.write_record(record.into_iter())
.context("cannot write record")?;
} else {
// We need to apply one or more cleanups, so run the slow path.
let cleaned = record.into_iter().map(|mut val: &[u8]| -> Cow<[u8]> {
// Convert values matching `--null` regex to empty strings.
if let Some(ref null_re) = null_re {
if null_re.is_match(val) {
val = &[]
}
}
// Remove whitespace from our cells.
if opt.trim_whitespace {
// We do this manually, because the built-in `trim` only
// works on UTF-8 strings, and we work on any
// "ASCII-compatible" encoding.
let first = val.iter().position(|c| !c.is_ascii_whitespace());
let last = val.iter().rposition(|c| !c.is_ascii_whitespace());
val = match (first, last) {
(Some(first), Some(last)) if first <= last => {
&val[first..=last]
}
(None, None) => &[],
_ => panic!(
"tried to trim {:?}, got impossible indices {:?} {:?}",
val, first, last,
),
};
}
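// To illustrate the index math above (example values, not from the source):
// for `val = b"  hi  "` we get `first == Some(2)` and `last == Some(3)`, so
// the slice `&val[2..=3]` is `b"hi"`; an all-whitespace cell yields
// `(None, None)` and collapses to the empty slice.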
// Fix newlines.
if opt.replace_newlines
&& (val.contains(&b'\n') || val.contains(&b'\r'))
{
NEWLINE_RE.replace_all(val, &b" "[..])
} else {
Cow::Borrowed(val)
}
});
if opt.drop_row_if_null.is_empty() {
// Still somewhat fast!
wtr.write_record(cleaned).context("cannot write record")?;
} else {
// We need to rebuild the record, check for null columns,
// and only output the record if everything's OK.
let row = cleaned.collect::<Vec<Cow<[u8]>>>();
for (value, &is_required_col) in row.iter().zip(required_cols.iter()) {
// If the column is NULL but shouldn't be, bail on this row.
if is_required_col && value.is_empty() {
bad_rows += 1;
debug!("row {}: required column is empty", rows);
continue 'next_row;
}
}
wtr.write_record(row).context("cannot write record")?;
}
}
}
// Flush all our buffers.
wtr.flush().context("error writing records")?;
// Print out some information about our run.
if !opt.quiet {
let elapsed = (now() - start_time).as_seconds_f64();
let bytes_per_second = (rdr.position().byte() as f64 / elapsed) as i64;
eprintln!(
"{} rows ({} bad) in {:.2} seconds, {}/sec",
rows,
bad_rows,
elapsed,
bytes_per_second.file_size(file_size_opts::BINARY)?,
);
}
// If more than 10% of rows are bad, assume something has gone horribly
// wrong.
if bad_rows.checked_mul(10).expect("multiplication overflow") > rows {
eprintln!("Too many rows ({} of {}) were bad", bad_rows, rows);
process::exit(2);
}
Ok(())
}
fn main() {
if let Err(err) = run() {
eprintln!("ERROR: {}", err);
let mut source = err.source();
while let Some(cause) = source {
eprintln!(" caused by: {}", cause);
source = cause.source();
}
process::exit(1);
}
} | // implementing `Read`, stored on the heap." This allows us to do runtime
// dispatch (as if Rust were object oriented). But because `csv` wraps a
// `BufReader` around the box, we only do that dispatch once per buffer | random_line_split |
main.rs | #![warn(clippy::all)]
#![forbid(unsafe_code)]
// Import from other crates.
use csv::ByteRecord;
use humansize::{file_size_opts, FileSize};
use lazy_static::lazy_static;
use log::debug;
use regex::bytes::Regex;
use std::{
borrow::Cow,
fs,
io::{self, prelude::*},
path::PathBuf,
process,
};
use structopt::StructOpt;
// Modules defined in separate files.
#[macro_use]
mod errors;
mod uniquifier;
mod util;
// Import from our own crates.
use crate::errors::*;
use crate::uniquifier::Uniquifier;
use crate::util::{now, CharSpecifier};
/// Use reasonably large input and output buffers. This seems to give us a
/// performance boost of around 5-10% compared to the standard 8 KiB buffer used
/// by `csv`.
const BUFFER_SIZE: usize = 256 * 1024;
/// Our command-line arguments.
#[derive(Debug, StructOpt)]
#[structopt(
name = "scrubcsv",
about = "Clean and normalize a CSV file.",
after_help = "Read a CSV file, normalize the \"good\" lines, and print them to standard
output. Discard any lines with the wrong number of columns.
Regular expressions use Rust syntax, as described here:
https://doc.rust-lang.org/regex/regex/index.html#syntax
scrubcsv should work with any ASCII-compatible encoding, but it will not
attempt to transcode.
Exit code:
0 on success
1 on error
2 if more than 10% of rows were bad"
)]
struct Opt {
/// Input file (uses stdin if omitted).
input: Option<PathBuf>,
/// Character used to separate fields in a row (must be a single ASCII
/// byte, or "tab").
#[structopt(
value_name = "CHAR",
short = "d",
long = "delimiter",
default_value = ","
)]
delimiter: CharSpecifier,
/// Convert values matching NULL_REGEX to an empty string. For a case-insensitive
/// match, use `(?i)`: `--null '(?i)NULL'`.
#[structopt(value_name = "NULL_REGEX", short = "n", long = "null")]
null: Option<String>,
/// Replace LF and CRLF sequences in values with spaces. This should improve
/// compatibility with systems like BigQuery that don't expect newlines
/// inside escaped strings.
#[structopt(long = "replace-newlines")]
replace_newlines: bool,
/// Remove whitespace at beginning and end of each cell.
#[structopt(long = "trim-whitespace")]
trim_whitespace: bool,
/// Make sure column names are unique, and use only lowercase letters, numbers
/// and underscores.
#[structopt(long = "clean-column-names")]
clean_column_names: bool,
/// Drop any rows where the specified column is empty or NULL. Can be passed
/// more than once. Useful for cleaning primary key columns before
/// upserting. Uses the cleaned form of column names.
#[structopt(value_name = "COL", long = "drop-row-if-null")]
drop_row_if_null: Vec<String>,
/// Do not print performance information.
#[structopt(short = "q", long = "quiet")]
quiet: bool,
/// Character used to quote entries. May be set to "none" to ignore all
/// quoting.
#[structopt(value_name = "CHAR", long = "quote", default_value = "\"")]
quote: CharSpecifier,
}
lazy_static! {
/// Either a CRLF newline, a LF newline, or a CR newline. Any of these
/// will break certain CSV parsers, including BigQuery's CSV importer.
static ref NEWLINE_RE: Regex = Regex::new(r#"\n|\r\n?"#)
.expect("regex in source code is unparseable");
}
/// This is a helper function called by our `main` function. Unlike
/// `main`, we return a `Result`, which means that we can use `?` and other
/// standard error-handling machinery.
fn run() -> Result<()> {
// Set up logging.
env_logger::init();
// Parse our command-line arguments using `structopt`.
let opt: Opt = Opt::from_args();
debug!("Options: {:#?}", opt);
// Remember the time we started.
let start_time = now();
// Build a regex containing our `--null` value.
let null_re = if let Some(null_re_str) = opt.null.as_ref() {
// Always match the full CSV value.
let s = format!("^{}$", null_re_str);
let re = Regex::new(&s).context("can't compile regular expression")?;
Some(re)
} else {
None
};
// Fetch our input from either standard input or a file. The only tricky
// detail here is that we use a `Box<dyn Read>` to represent "some object
// implementing `Read`, stored on the heap." This allows us to do runtime
// dispatch (as if Rust were object oriented). But because `csv` wraps a
// `BufReader` around the box, we only do that dispatch once per buffer
// flush, not on every tiny write.
let stdin = io::stdin();
let input: Box<dyn Read> = if let Some(ref path) = opt.input {
Box::new(
fs::File::open(path)
.with_context(|_| format!("cannot open {}", path.display()))?,
)
} else {
Box::new(stdin.lock())
};
// Create our CSV reader.
let mut rdr_builder = csv::ReaderBuilder::new();
// Set a reasonable buffer size.
rdr_builder.buffer_capacity(BUFFER_SIZE);
// We need headers so that we can honor --drop-row-if-null.
rdr_builder.has_headers(true);
// Allow records with the wrong number of columns.
rdr_builder.flexible(true);
// Configure our delimiter.
if let Some(delimiter) = opt.delimiter.char() {
rdr_builder.delimiter(delimiter);
} else {
return Err(format_err!("field delimiter is required"));
}
// Configure our quote character.
if let Some(quote) = opt.quote.char() {
rdr_builder.quote(quote);
} else {
rdr_builder.quoting(false);
}
let mut rdr = rdr_builder.from_reader(input);
// We lock `stdout`, giving us exclusive access. In the past, this has made
// an enormous difference in performance.
let stdout = io::stdout();
let output = stdout.lock();
// Create our CSV writer. Note that we _don't_ allow variable numbers
// of columns, non-standard delimiters, or other nonsense: We want our
// output to be highly normalized.
let mut wtr = csv::WriterBuilder::new()
.buffer_capacity(BUFFER_SIZE)
.from_writer(output);
// Get our header and, if we were asked, make sure all the column names are unique.
let mut hdr = rdr
.byte_headers()
.context("cannot read headers")?
.to_owned();
if opt.clean_column_names {
let mut uniquifier = Uniquifier::default();
let mut new_hdr = ByteRecord::default();
for col in hdr.into_iter() {
// Convert from bytes to UTF-8, make unique (and clean), and convert back to bytes.
let col = String::from_utf8_lossy(col);
let col = uniquifier.unique_id_for(&col)?.to_owned();
new_hdr.push_field(col.as_bytes());
}
hdr = new_hdr;
}
// Write our header to our output.
wtr.write_byte_record(&hdr)
.context("cannot write headers")?;
// Calculate the number of expected columns.
let expected_cols = hdr.len();
// Just in case --drop-row-if-null was passed, precompute which columns are
// required to contain a value.
let required_cols = hdr
.iter()
.map(|name| -> bool {
opt.drop_row_if_null
.iter()
.any(|required_name| required_name.as_bytes() == name)
})
.collect::<Vec<bool>>();
// Keep track of total rows and malformed rows seen. We count the header as
// a row for backwards compatibility.
let mut rows: u64 = 1;
let mut bad_rows: u64 = 0;
// Can we use the fast path and copy the data through unchanged? Or do we
// need to clean up embedded newlines in our data? (These break BigQuery,
// for example.)
let use_fast_path = null_re.is_none()
&& !opt.replace_newlines
&& !opt.trim_whitespace
&& opt.drop_row_if_null.is_empty();
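// Concretely (an illustration, not from the original source): a plain
// `scrubcsv input.csv` run takes this fast path, while passing any of
// `--null`, `--replace-newlines`, `--trim-whitespace`, or
// `--drop-row-if-null` forces the slower cleanup path below.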
// Iterate over all the rows, checking to make sure they look reasonable.
//
// If we use the lowest-level, zero-copy API for `csv`, we can process about
// 225 MB/s. But it turns out we can't do that, because we need to count
// all the row's fields before deciding whether or not to write it out.
'next_row: for record in rdr.byte_records() {
let record = record.context("cannot read record")?;
// Keep track of how many rows we've seen.
rows += 1;
// Check if we have the right number of columns in this row.
if record.len() != expected_cols {
bad_rows += 1;
debug!(
"row {}: expected {} columns, found {}",
rows,
expected_cols,
record.len(),
);
continue 'next_row;
}
// Decide how to handle this row.
if use_fast_path {
// We don't need to do anything fancy, so just pass it through.
// I'm not sure how much this actually buys us in current Rust
// versions, but it seemed like a good idea at the time.
wtr.write_record(record.into_iter())
.context("cannot write record")?;
} else {
// We need to apply one or more cleanups, so run the slow path.
let cleaned = record.into_iter().map(|mut val: &[u8]| -> Cow<[u8]> {
// Convert values matching `--null` regex to empty strings.
if let Some(ref null_re) = null_re |
// Remove whitespace from our cells.
if opt.trim_whitespace {
// We do this manually, because the built-in `trim` only
// works on UTF-8 strings, and we work on any
// "ASCII-compatible" encoding.
let first = val.iter().position(|c| !c.is_ascii_whitespace());
let last = val.iter().rposition(|c| !c.is_ascii_whitespace());
val = match (first, last) {
(Some(first), Some(last)) if first <= last => {
&val[first..=last]
}
(None, None) => &[],
_ => panic!(
"tried to trim {:?}, got impossible indices {:?} {:?}",
val, first, last,
),
};
}
// Fix newlines.
if opt.replace_newlines
&& (val.contains(&b'\n') || val.contains(&b'\r'))
{
NEWLINE_RE.replace_all(val, &b" "[..])
} else {
Cow::Borrowed(val)
}
});
if opt.drop_row_if_null.is_empty() {
// Still somewhat fast!
wtr.write_record(cleaned).context("cannot write record")?;
} else {
// We need to rebuild the record, check for null columns,
// and only output the record if everything's OK.
let row = cleaned.collect::<Vec<Cow<[u8]>>>();
for (value, &is_required_col) in row.iter().zip(required_cols.iter()) {
// If the column is NULL but shouldn't be, bail on this row.
if is_required_col && value.is_empty() {
bad_rows += 1;
debug!("row {}: required column is empty", rows);
continue 'next_row;
}
}
wtr.write_record(row).context("cannot write record")?;
}
}
}
// Flush all our buffers.
wtr.flush().context("error writing records")?;
// Print out some information about our run.
if !opt.quiet {
let elapsed = (now() - start_time).as_seconds_f64();
let bytes_per_second = (rdr.position().byte() as f64 / elapsed) as i64;
eprintln!(
"{} rows ({} bad) in {:.2} seconds, {}/sec",
rows,
bad_rows,
elapsed,
bytes_per_second.file_size(file_size_opts::BINARY)?,
);
}
// If more than 10% of rows are bad, assume something has gone horribly
// wrong.
if bad_rows.checked_mul(10).expect("multiplication overflow") > rows {
eprintln!("Too many rows ({} of {}) were bad", bad_rows, rows);
process::exit(2);
}
Ok(())
}
fn main() {
if let Err(err) = run() {
eprintln!("ERROR: {}", err);
let mut source = err.source();
while let Some(cause) = source {
eprintln!(" caused by: {}", cause);
source = cause.source();
}
process::exit(1);
}
}
| {
if null_re.is_match(val) {
val = &[]
}
} | conditional_block |
extractor.rs | use crate::{
client,
config::{Configuration, CONFIGURATION},
scanner::SCANNED_URLS,
statistics::{
StatCommand::{self, UpdateUsizeField},
StatField::{LinksExtracted, TotalExpected},
},
utils::{format_url, make_request},
FeroxResponse,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::Url;
use std::collections::HashSet;
use tokio::sync::mpsc::UnboundedSender;
/// Regular expression used in [LinkFinder](https://github.com/GerbenJavado/LinkFinder)
///
/// Incorporates change from this [Pull Request](https://github.com/GerbenJavado/LinkFinder/pull/66/files)
const LINKFINDER_REGEX: &str = r#"(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-.]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')"#;
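// A rough illustration (not part of the original source) of what this regex
// pulls out: given a body containing `<script>fetch("/api/v1/users")</script>`,
// the match is `"/api/v1/users"` (quotes included; `get_links` below trims
// them off before parsing).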
/// Regular expression to pull url paths from robots.txt
///
/// ref: https://developers.google.com/search/reference/robots_txt
const ROBOTS_TXT_REGEX: &str =
r#"(?m)^ *(Allow|Disallow): *(?P<url_path>[a-zA-Z0-9._/?#@!&'()+,;%=-]+?)$"#; // multi-line (?m)
lazy_static! {
/// `LINKFINDER_REGEX` as a regex::Regex type
static ref LINKS_REGEX: Regex = Regex::new(LINKFINDER_REGEX).unwrap();
/// `ROBOTS_TXT_REGEX` as a regex::Regex type
static ref ROBOTS_REGEX: Regex = Regex::new(ROBOTS_TXT_REGEX).unwrap();
}
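// A short sketch of `ROBOTS_REGEX` (illustrative input): in a robots.txt body
// such as
//
// Disallow: /admin
// Allow: /public
//
// the named capture group `url_path` yields `/admin` and `/public`.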
/// Iterate over a given path, return a list of every sub-path found
///
/// example: `path` contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// the following fragments would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn get_sub_paths_from_path(path: &str) -> Vec<String> {
log::trace!("enter: get_sub_paths_from_path({})", path);
let mut paths = vec![];
// filter out any empty strings caused by .split
let mut parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let length = parts.len();
for i in 0..length {
// iterate over all parts of the path
if parts.is_empty() {
// pop left us with an empty vector, we're done
break;
}
let mut possible_path = parts.join("/");
if possible_path.is_empty() {
// .join can result in an empty string, which we don't need, ignore
continue;
}
if i > 0 {
// this isn't the last index of the parts array
// ex: /buried/misc/stupidfile.php
// this block skips the file but sees all parent folders
possible_path = format!("{}/", possible_path);
}
paths.push(possible_path); // good sub-path found
parts.pop(); // use .pop() to remove the last part of the path and continue iteration
}
log::trace!("exit: get_sub_paths_from_path -> {:?}", paths);
paths
}
/// simple helper to stay DRY, tries to join a url + fragment and add it to the `links` HashSet
fn add_link_to_set_of_links(link: &str, url: &Url, links: &mut HashSet<String>) {
log::trace!(
"enter: add_link_to_set_of_links({}, {}, {:?})",
link,
url.to_string(),
links
);
match url.join(&link) {
Ok(new_url) => {
links.insert(new_url.to_string());
}
Err(e) => {
log::error!("Could not join given url to the base url: {}", e);
}
}
log::trace!("exit: add_link_to_set_of_links");
}
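// Example (hypothetical values): with `url = https://localhost/` and
// `link = "homepage/assets/"`, `url.join` produces
// `https://localhost/homepage/assets/`, which is what gets inserted above.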
/// Given a `reqwest::Response`, perform the following actions
/// - parse the response's text for links using the linkfinder regex
/// - for every link found take its url path and parse each sub-path
/// - example: Response contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// with a base url of http://localhost, the following urls would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
pub async fn get_links(
response: &FeroxResponse,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: get_links({}, {:?})",
response.url().as_str(),
tx_stats
);
let mut links = HashSet::<String>::new();
let body = response.text();
for capture in LINKS_REGEX.captures_iter(&body) {
// remove single & double quotes from both ends of the capture
// capture[0] is the entire match, additional capture groups start at [1]
let link = capture[0].trim_matches(|c| c == '\'' || c == '"');
match Url::parse(link) {
Ok(absolute) => {
if absolute.domain() != response.url().domain()
|| absolute.host() != response.url().host()
{
// domains/ips are not the same, don't scan things that aren't part of the original
// target url
continue;
}
add_all_sub_paths(absolute.path(), &response, &mut links);
}
Err(e) => {
// this is the expected error that happens when we try to parse a url fragment
// ex: Url::parse("/login") -> Err("relative URL without a base")
// while this is technically an error, these are good results for us
if e.to_string().contains("relative URL without a base") {
add_all_sub_paths(link, &response, &mut links);
} else {
// unexpected error has occurred
log::error!("Could not parse given url: {}", e);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: get_links -> {:?}", links);
links
}
/// take a url fragment like homepage/assets/img/icons/handshake.svg and
/// incrementally add
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn add_all_sub_paths(url_path: &str, response: &FeroxResponse, mut links: &mut HashSet<String>) {
log::trace!(
"enter: add_all_sub_paths({}, {}, {:?})",
url_path,
response,
links
);
for sub_path in get_sub_paths_from_path(url_path) {
log::debug!("Adding {} to {:?}", sub_path, links);
add_link_to_set_of_links(&sub_path, &response.url(), &mut links);
}
log::trace!("exit: add_all_sub_paths");
}
/// Wrapper around link extraction logic
/// currently used in two places:
/// - links from response bodies
/// - links from robots.txt responses
///
/// general steps taken:
/// - create a new Url object based on cli options/args
/// - check if the new Url has already been seen/scanned -> None
/// - make a request to the new Url? -> Some(response) : None
pub async fn request_feroxresponse_from_new_link(
url: &str,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: request_feroxresponse_from_new_link({}, {:?})",
url,
tx_stats
);
// create a url based on the given command line options, return None on error
let new_url = match format_url(
&url,
&"",
CONFIGURATION.add_slash,
&CONFIGURATION.queries,
None,
tx_stats.clone(),
) {
Ok(url) => url,
Err(_) => |
};
if SCANNED_URLS.get_scan_by_url(&new_url.to_string()).is_some() {
// we've seen the url before and don't need to scan again
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
// make the request and store the response
let new_response = match make_request(&CONFIGURATION.client, &new_url, tx_stats).await {
Ok(resp) => resp,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
let new_ferox_response = FeroxResponse::from(new_response, true).await;
log::trace!(
"exit: request_feroxresponse_from_new_link -> {:?}",
new_ferox_response
);
Some(new_ferox_response)
}
/// helper function that simply requests /robots.txt on the given url's base url
///
/// example:
/// http://localhost/api/users -> http://localhost/robots.txt
///
/// The length of the given path has no effect on what's requested; it's always
/// base url + /robots.txt
pub async fn request_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: get_robots_file({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
// more often than not, domain/robots.txt will redirect to www.domain/robots.txt or something
// similar; to account for that, create a client that will follow redirects, regardless of
// what the user specified for the scanning client. Other than redirects, it will respect
// all other user specified settings
let follow_redirects = true;
let proxy = if config.proxy.is_empty() {
None
} else {
Some(config.proxy.as_str())
};
let client = client::initialize(
config.timeout,
&config.user_agent,
follow_redirects,
config.insecure,
&config.headers,
proxy,
);
if let Ok(mut url) = Url::parse(base_url) {
url.set_path("/robots.txt"); // overwrite existing path with /robots.txt
if let Ok(response) = make_request(&client, &url, tx_stats).await {
let ferox_response = FeroxResponse::from(response, true).await;
log::trace!("exit: get_robots_file -> {}", ferox_response);
return Some(ferox_response);
}
}
None
}
/// Entry point to perform link extraction from robots.txt
///
/// `base_url` can have paths and subpaths, however robots.txt will be requested from the
/// root of the url
/// given the url:
/// http://localhost/stuff/things
/// this function requests:
/// http://localhost/robots.txt
pub async fn extract_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: extract_robots_txt({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
let mut links = HashSet::new();
if let Some(response) = request_robots_txt(&base_url, &config, tx_stats.clone()).await {
for capture in ROBOTS_REGEX.captures_iter(response.text.as_str()) {
if let Some(new_path) = capture.name("url_path") {
if let Ok(mut new_url) = Url::parse(base_url) {
new_url.set_path(new_path.as_str());
add_all_sub_paths(new_url.path(), &response, &mut links);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: extract_robots_txt -> {:?}", links);
links
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::make_request;
use crate::FeroxChannel;
use httpmock::Method::GET;
use httpmock::MockServer;
use reqwest::Client;
use tokio::sync::mpsc;
#[test]
/// extract sub paths from the given url fragment; expect 4 sub paths and that all are
/// in the expected array
fn extractor_get_sub_paths_from_path_with_multiple_paths() {
let path = "homepage/assets/img/icons/handshake.svg";
let paths = get_sub_paths_from_path(&path);
let expected = vec![
"homepage/",
"homepage/assets/",
"homepage/assets/img/",
"homepage/assets/img/icons/",
"homepage/assets/img/icons/handshake.svg",
];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 2 sub paths and that all are
/// in the expected array. the fragment is wrapped in slashes to ensure no empty strings are
/// returned
fn extractor_get_sub_paths_from_path_with_enclosing_slashes() {
let path = "/homepage/assets/";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage/", "homepage/assets"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, no forward slashes are
/// included
fn extractor_get_sub_paths_from_path_with_only_a_word() {
let path = "homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, forward slash removed
fn extractor_get_sub_paths_from_path_with_an_absolute_word() {
let path = "/homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// test that a full url and fragment are joined correctly, then added to the given list
/// i.e. the happy path
fn extractor_add_link_to_set_of_links_happy_path() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "admin";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 1);
assert!(links.contains("https://localhost/admin"));
}
#[test]
/// test that an invalid path fragment doesn't add anything to the set of links
fn extractor_add_link_to_set_of_links_with_non_base_url() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "\\\\\\\\";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 0);
assert!(links.is_empty());
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// use make_request to generate a Response, and use the Response to test get_links;
/// the response will contain an absolute path to a domain that is not part of the scanned
/// domain; expect an empty set returned
async fn extractor_get_links_with_absolute_url_that_differs_from_target_domain(
) -> Result<(), Box<dyn std::error::Error>> {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET)
.path("/some-path");
then.status(200)
.body("\"http://defintely.not.a.thing.probably.com/homepage/assets/img/icons/handshake.svg\"");
});
let client = Client::new();
let url = Url::parse(&srv.url("/some-path")).unwrap();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
let response = make_request(&client, &url, tx.clone()).await.unwrap();
let ferox_response = FeroxResponse::from(response, true).await;
let links = get_links(&ferox_response, tx).await;
assert!(links.is_empty());
assert_eq!(mock.hits(), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// test that /robots.txt is correctly requested given a base url (happy path)
async fn request_robots_txt_with_and_without_proxy() {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET).path("/robots.txt");
then.status(200).body("this is a test");
});
let mut config = Configuration::default();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
request_robots_txt(&srv.url("/api/users/stuff/things"), &config, tx.clone()).await;
// note: the proxy doesn't actually do anything other than hit a different code branch
// in this unit test; it would however have an effect on an integration test
config.proxy = srv.url("/ima-proxy");
request_robots_txt(&srv.url("/api/different/path"), &config, tx).await;
assert_eq!(mock.hits(), 2);
}
}
| {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
} | conditional_block |
extractor.rs | use crate::{
client,
config::{Configuration, CONFIGURATION},
scanner::SCANNED_URLS,
statistics::{
StatCommand::{self, UpdateUsizeField},
StatField::{LinksExtracted, TotalExpected},
},
utils::{format_url, make_request},
FeroxResponse,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::Url;
use std::collections::HashSet;
use tokio::sync::mpsc::UnboundedSender;
/// Regular expression used in [LinkFinder](https://github.com/GerbenJavado/LinkFinder)
///
/// Incorporates change from this [Pull Request](https://github.com/GerbenJavado/LinkFinder/pull/66/files)
const LINKFINDER_REGEX: &str = r#"(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-.]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')"#;
/// Regular expression to pull url paths from robots.txt
///
/// ref: https://developers.google.com/search/reference/robots_txt
const ROBOTS_TXT_REGEX: &str =
r#"(?m)^ *(Allow|Disallow): *(?P<url_path>[a-zA-Z0-9._/?#@!&'()+,;%=-]+?)$"#; // multi-line (?m)
lazy_static! {
/// `LINKFINDER_REGEX` as a regex::Regex type
static ref LINKS_REGEX: Regex = Regex::new(LINKFINDER_REGEX).unwrap();
/// `ROBOTS_TXT_REGEX` as a regex::Regex type
static ref ROBOTS_REGEX: Regex = Regex::new(ROBOTS_TXT_REGEX).unwrap();
}
/// Iterate over a given path, return a list of every sub-path found
///
/// example: `path` contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// the following fragments would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn get_sub_paths_from_path(path: &str) -> Vec<String> {
log::trace!("enter: get_sub_paths_from_path({})", path);
let mut paths = vec![];
// filter out any empty strings caused by .split
let mut parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let length = parts.len();
for i in 0..length {
// iterate over all parts of the path
if parts.is_empty() {
// pop left us with an empty vector, we're done
break;
}
let mut possible_path = parts.join("/");
if possible_path.is_empty() {
// .join can result in an empty string, which we don't need, ignore
continue;
}
if i > 0 {
// this isn't the last index of the parts array
// ex: /buried/misc/stupidfile.php
// this block skips the file but sees all parent folders
possible_path = format!("{}/", possible_path);
}
paths.push(possible_path); // good sub-path found
parts.pop(); // use .pop() to remove the last part of the path and continue iteration
}
log::trace!("exit: get_sub_paths_from_path -> {:?}", paths);
paths
}
/// simple helper to stay DRY, tries to join a url + fragment and add it to the `links` HashSet
fn add_link_to_set_of_links(link: &str, url: &Url, links: &mut HashSet<String>) {
log::trace!(
"enter: add_link_to_set_of_links({}, {}, {:?})",
link,
url.to_string(),
links
);
match url.join(&link) {
Ok(new_url) => {
links.insert(new_url.to_string());
}
Err(e) => {
log::error!("Could not join given url to the base url: {}", e);
}
}
log::trace!("exit: add_link_to_set_of_links");
}
/// Given a `reqwest::Response`, perform the following actions
/// - parse the response's text for links using the linkfinder regex
/// - for every link found take its url path and parse each sub-path
/// - example: Response contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// with a base url of http://localhost, the following urls would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
pub async fn | (
response: &FeroxResponse,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: get_links({}, {:?})",
response.url().as_str(),
tx_stats
);
let mut links = HashSet::<String>::new();
let body = response.text();
for capture in LINKS_REGEX.captures_iter(&body) {
// remove single & double quotes from both ends of the capture
// capture[0] is the entire match, additional capture groups start at [1]
let link = capture[0].trim_matches(|c| c == '\'' || c == '"');
match Url::parse(link) {
Ok(absolute) => {
if absolute.domain() != response.url().domain()
|| absolute.host() != response.url().host()
{
// domains/ips are not the same, don't scan things that aren't part of the original
// target url
continue;
}
add_all_sub_paths(absolute.path(), &response, &mut links);
}
Err(e) => {
// this is the expected error that happens when we try to parse a url fragment
// ex: Url::parse("/login") -> Err("relative URL without a base")
// while this is technically an error, these are good results for us
if e.to_string().contains("relative URL without a base") {
add_all_sub_paths(link, &response, &mut links);
} else {
// unexpected error has occurred
log::error!("Could not parse given url: {}", e);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: get_links -> {:?}", links);
links
}
/// take a url fragment like homepage/assets/img/icons/handshake.svg and
/// incrementally add
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn add_all_sub_paths(url_path: &str, response: &FeroxResponse, mut links: &mut HashSet<String>) {
log::trace!(
"enter: add_all_sub_paths({}, {}, {:?})",
url_path,
response,
links
);
for sub_path in get_sub_paths_from_path(url_path) {
log::debug!("Adding {} to {:?}", sub_path, links);
add_link_to_set_of_links(&sub_path, &response.url(), &mut links);
}
log::trace!("exit: add_all_sub_paths");
}
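// Example (hypothetical values): for `url_path = "homepage/assets/app.js"` on
// a response from `http://localhost`, this adds `http://localhost/homepage/`,
// `http://localhost/homepage/assets/`, and
// `http://localhost/homepage/assets/app.js` to `links`.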
/// Wrapper around link extraction logic
/// currently used in two places:
/// - links from response bodies
/// - links from robots.txt responses
///
/// general steps taken:
/// - create a new Url object based on cli options/args
/// - check if the new Url has already been seen/scanned -> None
/// - make a request to the new Url? -> Some(response) : None
pub async fn request_feroxresponse_from_new_link(
url: &str,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: request_feroxresponse_from_new_link({}, {:?})",
url,
tx_stats
);
// create a url based on the given command line options, return None on error
let new_url = match format_url(
&url,
&"",
CONFIGURATION.add_slash,
&CONFIGURATION.queries,
None,
tx_stats.clone(),
) {
Ok(url) => url,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
if SCANNED_URLS.get_scan_by_url(&new_url.to_string()).is_some() {
// we've seen the url before and don't need to scan again
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
// make the request and store the response
let new_response = match make_request(&CONFIGURATION.client, &new_url, tx_stats).await {
Ok(resp) => resp,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
let new_ferox_response = FeroxResponse::from(new_response, true).await;
log::trace!(
"exit: request_feroxresponse_from_new_link -> {:?}",
new_ferox_response
);
Some(new_ferox_response)
}
/// helper function that simply requests /robots.txt on the given url's base url
///
/// example:
/// http://localhost/api/users -> http://localhost/robots.txt
///
/// The length of the given path has no effect on what's requested; it's always
/// base url + /robots.txt
pub async fn request_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: get_robots_file({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
// more often than not, domain/robots.txt will redirect to www.domain/robots.txt or something
// similar; to account for that, create a client that will follow redirects, regardless of
// what the user specified for the scanning client. Other than redirects, it will respect
// all other user specified settings
let follow_redirects = true;
let proxy = if config.proxy.is_empty() {
None
} else {
Some(config.proxy.as_str())
};
let client = client::initialize(
config.timeout,
&config.user_agent,
follow_redirects,
config.insecure,
&config.headers,
proxy,
);
if let Ok(mut url) = Url::parse(base_url) {
url.set_path("/robots.txt"); // overwrite existing path with /robots.txt
if let Ok(response) = make_request(&client, &url, tx_stats).await {
let ferox_response = FeroxResponse::from(response, true).await;
log::trace!("exit: get_robots_file -> {}", ferox_response);
return Some(ferox_response);
}
}
None
}
/// Entry point to perform link extraction from robots.txt
///
/// `base_url` can have paths and subpaths, however robots.txt will be requested from the
/// root of the url
/// given the url:
/// http://localhost/stuff/things
/// this function requests:
/// http://localhost/robots.txt
pub async fn extract_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: extract_robots_txt({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
let mut links = HashSet::new();
if let Some(response) = request_robots_txt(&base_url, &config, tx_stats.clone()).await {
for capture in ROBOTS_REGEX.captures_iter(response.text.as_str()) {
if let Some(new_path) = capture.name("url_path") {
if let Ok(mut new_url) = Url::parse(base_url) {
new_url.set_path(new_path.as_str());
add_all_sub_paths(new_url.path(), &response, &mut links);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: extract_robots_txt -> {:?}", links);
links
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::make_request;
use crate::FeroxChannel;
use httpmock::Method::GET;
use httpmock::MockServer;
use reqwest::Client;
use tokio::sync::mpsc;
#[test]
/// extract sub paths from the given url fragment; expect 4 sub paths and that all are
/// in the expected array
fn extractor_get_sub_paths_from_path_with_multiple_paths() {
let path = "homepage/assets/img/icons/handshake.svg";
let paths = get_sub_paths_from_path(&path);
let expected = vec![
"homepage/",
"homepage/assets/",
"homepage/assets/img/",
"homepage/assets/img/icons/",
"homepage/assets/img/icons/handshake.svg",
];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 2 sub paths and that all are
/// in the expected array. the fragment is wrapped in slashes to ensure no empty strings are
/// returned
fn extractor_get_sub_paths_from_path_with_enclosing_slashes() {
let path = "/homepage/assets/";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage/", "homepage/assets"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, no forward slashes are
/// included
fn extractor_get_sub_paths_from_path_with_only_a_word() {
let path = "homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, forward slash removed
fn extractor_get_sub_paths_from_path_with_an_absolute_word() {
let path = "/homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// test that a full url and fragment are joined correctly, then added to the given list
/// i.e. the happy path
fn extractor_add_link_to_set_of_links_happy_path() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "admin";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 1);
assert!(links.contains("https://localhost/admin"));
}
#[test]
/// test that an invalid path fragment doesn't add anything to the set of links
fn extractor_add_link_to_set_of_links_with_non_base_url() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "\\\\\\\\";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 0);
assert!(links.is_empty());
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// use make_request to generate a Response, and use the Response to test get_links;
/// the response will contain an absolute path to a domain that is not part of the scanned
/// domain; expect an empty set returned
async fn extractor_get_links_with_absolute_url_that_differs_from_target_domain(
) -> Result<(), Box<dyn std::error::Error>> {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET)
.path("/some-path");
then.status(200)
.body("\"http://defintely.not.a.thing.probably.com/homepage/assets/img/icons/handshake.svg\"");
});
let client = Client::new();
let url = Url::parse(&srv.url("/some-path")).unwrap();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
let response = make_request(&client, &url, tx.clone()).await.unwrap();
let ferox_response = FeroxResponse::from(response, true).await;
let links = get_links(&ferox_response, tx).await;
assert!(links.is_empty());
assert_eq!(mock.hits(), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// test that /robots.txt is correctly requested given a base url (happy path)
async fn request_robots_txt_with_and_without_proxy() {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET).path("/robots.txt");
then.status(200).body("this is a test");
});
let mut config = Configuration::default();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
request_robots_txt(&srv.url("/api/users/stuff/things"), &config, tx.clone()).await;
// note: the proxy doesn't actually do anything other than hit a different code branch
// in this unit test; it would however have an effect on an integration test
config.proxy = srv.url("/ima-proxy");
request_robots_txt(&srv.url("/api/different/path"), &config, tx).await;
assert_eq!(mock.hits(), 2);
}
}
| get_links | identifier_name |
extractor.rs | use crate::{
client,
config::{Configuration, CONFIGURATION},
scanner::SCANNED_URLS,
statistics::{
StatCommand::{self, UpdateUsizeField},
StatField::{LinksExtracted, TotalExpected},
},
utils::{format_url, make_request},
FeroxResponse,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::Url;
use std::collections::HashSet;
use tokio::sync::mpsc::UnboundedSender;
/// Regular expression used in [LinkFinder](https://github.com/GerbenJavado/LinkFinder)
///
/// Incorporates change from this [Pull Request](https://github.com/GerbenJavado/LinkFinder/pull/66/files)
const LINKFINDER_REGEX: &str = r#"(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-.]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')"#;
/// Regular expression to pull url paths from robots.txt
///
/// ref: https://developers.google.com/search/reference/robots_txt
const ROBOTS_TXT_REGEX: &str =
r#"(?m)^ *(Allow|Disallow): *(?P<url_path>[a-zA-Z0-9._/?#@!&'()+,;%=-]+?)$"#; // multi-line (?m)
lazy_static! {
/// `LINKFINDER_REGEX` as a regex::Regex type
static ref LINKS_REGEX: Regex = Regex::new(LINKFINDER_REGEX).unwrap();
/// `ROBOTS_TXT_REGEX` as a regex::Regex type
static ref ROBOTS_REGEX: Regex = Regex::new(ROBOTS_TXT_REGEX).unwrap();
}
/// Iterate over a given path, return a list of every sub-path found
///
/// example: `path` contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// the following fragments would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn get_sub_paths_from_path(path: &str) -> Vec<String> {
log::trace!("enter: get_sub_paths_from_path({})", path);
let mut paths = vec![];
// filter out any empty strings caused by .split
let mut parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let length = parts.len();
for i in 0..length {
// iterate over all parts of the path
if parts.is_empty() {
// pop left us with an empty vector, we're done
break;
}
let mut possible_path = parts.join("/");
if possible_path.is_empty() {
// .join can result in an empty string, which we don't need, ignore
continue;
}
if i > 0 {
// this isn't the last index of the parts array
// ex: /buried/misc/stupidfile.php
// this block skips the file but sees all parent folders
possible_path = format!("{}/", possible_path);
}
paths.push(possible_path); // good sub-path found
parts.pop(); // use .pop() to remove the last part of the path and continue iteration
}
log::trace!("exit: get_sub_paths_from_path -> {:?}", paths);
paths
}
/// simple helper to stay DRY, tries to join a url + fragment and add it to the `links` HashSet
fn add_link_to_set_of_links(link: &str, url: &Url, links: &mut HashSet<String>) {
log::trace!(
"enter: add_link_to_set_of_links({}, {}, {:?})",
link,
url.to_string(),
links
);
match url.join(&link) {
Ok(new_url) => {
links.insert(new_url.to_string());
}
Err(e) => {
log::error!("Could not join given url to the base url: {}", e);
}
}
log::trace!("exit: add_link_to_set_of_links");
}
/// Given a `reqwest::Response`, perform the following actions
/// - parse the response's text for links using the linkfinder regex
/// - for every link found take its url path and parse each sub-path
/// - example: Response contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// with a base url of http://localhost, the following urls would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
pub async fn get_links(
response: &FeroxResponse,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: get_links({}, {:?})",
response.url().as_str(),
tx_stats
);
let mut links = HashSet::<String>::new();
let body = response.text();
for capture in LINKS_REGEX.captures_iter(&body) {
// remove single & double quotes from both ends of the capture
// capture[0] is the entire match, additional capture groups start at [1]
let link = capture[0].trim_matches(|c| c == '\'' || c == '"');
match Url::parse(link) {
Ok(absolute) => {
if absolute.domain() != response.url().domain()
|| absolute.host() != response.url().host()
{
// domains/ips are not the same, don't scan things that aren't part of the original
// target url
continue;
}
add_all_sub_paths(absolute.path(), &response, &mut links);
}
Err(e) => {
// this is the expected error that happens when we try to parse a url fragment
// ex: Url::parse("/login") -> Err("relative URL without a base")
// while this is technically an error, these are good results for us
if e.to_string().contains("relative URL without a base") {
add_all_sub_paths(link, &response, &mut links);
} else {
// unexpected error has occurred
log::error!("Could not parse given url: {}", e);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: get_links -> {:?}", links);
links
}
/// take a url fragment like homepage/assets/img/icons/handshake.svg and
/// incrementally add
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn add_all_sub_paths(url_path: &str, response: &FeroxResponse, mut links: &mut HashSet<String>) {
log::trace!(
"enter: add_all_sub_paths({}, {}, {:?})",
url_path,
response,
links
);
for sub_path in get_sub_paths_from_path(url_path) {
log::debug!("Adding {} to {:?}", sub_path, links);
add_link_to_set_of_links(&sub_path, &response.url(), &mut links);
}
log::trace!("exit: add_all_sub_paths");
}
/// Wrapper around link extraction logic
/// currently used in two places:
/// - links from response bodies
/// - links from robots.txt responses
///
/// general steps taken:
/// - create a new Url object based on cli options/args
/// - check if the new Url has already been seen/scanned -> None
/// - make a request to the new Url? -> Some(response) : None
pub async fn request_feroxresponse_from_new_link(
url: &str,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: request_feroxresponse_from_new_link({}, {:?})",
url,
tx_stats
);
// create a url based on the given command line options, return None on error
let new_url = match format_url(
&url,
&"",
CONFIGURATION.add_slash,
&CONFIGURATION.queries,
None,
tx_stats.clone(),
) {
Ok(url) => url,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
if SCANNED_URLS.get_scan_by_url(&new_url.to_string()).is_some() {
// we've seen the url before and don't need to scan again
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
// make the request and store the response
let new_response = match make_request(&CONFIGURATION.client, &new_url, tx_stats).await {
Ok(resp) => resp,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
let new_ferox_response = FeroxResponse::from(new_response, true).await;
log::trace!(
"exit: request_feroxresponse_from_new_link -> {:?}",
new_ferox_response
);
Some(new_ferox_response)
}
/// helper function that simply requests /robots.txt on the given url's base url
///
/// example:
/// http://localhost/api/users -> http://localhost/robots.txt
///
/// The length of the given path has no effect on what's requested; it's always
/// base url + /robots.txt
pub async fn request_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: get_robots_file({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
// more often than not, domain/robots.txt will redirect to www.domain/robots.txt or something
// similar; to account for that, create a client that will follow redirects, regardless of
// what the user specified for the scanning client. Other than redirects, it will respect
// all other user specified settings
let follow_redirects = true;
let proxy = if config.proxy.is_empty() {
None
} else {
Some(config.proxy.as_str())
};
let client = client::initialize(
config.timeout,
&config.user_agent,
follow_redirects,
config.insecure,
&config.headers,
proxy,
);
if let Ok(mut url) = Url::parse(base_url) {
url.set_path("/robots.txt"); // overwrite existing path with /robots.txt
if let Ok(response) = make_request(&client, &url, tx_stats).await {
let ferox_response = FeroxResponse::from(response, true).await;
log::trace!("exit: get_robots_file -> {}", ferox_response);
return Some(ferox_response);
}
}
None
}
/// Entry point to perform link extraction from robots.txt
///
/// `base_url` can have paths and subpaths, however robots.txt will be requested from the
/// root of the url
/// given the url:
/// http://localhost/stuff/things
/// this function requests:
/// http://localhost/robots.txt
pub async fn extract_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: extract_robots_txt({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
let mut links = HashSet::new();
if let Some(response) = request_robots_txt(&base_url, &config, tx_stats.clone()).await {
for capture in ROBOTS_REGEX.captures_iter(response.text.as_str()) {
if let Some(new_path) = capture.name("url_path") {
if let Ok(mut new_url) = Url::parse(base_url) {
new_url.set_path(new_path.as_str());
add_all_sub_paths(new_url.path(), &response, &mut links);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: extract_robots_txt -> {:?}", links);
links
}
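// Sketch of the whole flow (illustrative values): if
// `http://localhost/robots.txt` contains `Disallow: /secret/files`, the loop
// above captures `/secret/files`, and `add_all_sub_paths` then queues both
// `http://localhost/secret/` and `http://localhost/secret/files` for scanning.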
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::make_request;
use crate::FeroxChannel;
use httpmock::Method::GET;
use httpmock::MockServer;
use reqwest::Client;
use tokio::sync::mpsc;
#[test]
/// extract sub paths from the given url fragment; expect 4 sub paths and that all are
/// in the expected array
fn extractor_get_sub_paths_from_path_with_multiple_paths() {
let path = "homepage/assets/img/icons/handshake.svg";
let paths = get_sub_paths_from_path(&path);
let expected = vec![
"homepage/",
"homepage/assets/",
"homepage/assets/img/",
"homepage/assets/img/icons/",
"homepage/assets/img/icons/handshake.svg",
];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 2 sub paths and that all are
/// in the expected array. the fragment is wrapped in slashes to ensure no empty strings are
/// returned
fn extractor_get_sub_paths_from_path_with_enclosing_slashes() {
let path = "/homepage/assets/";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage/", "homepage/assets"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, no forward slashes are
/// included
fn extractor_get_sub_paths_from_path_with_only_a_word() {
let path = "homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, forward slash removed
fn extractor_get_sub_paths_from_path_with_an_absolute_word() {
let path = "/homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// test that a full url and fragment are joined correctly, then added to the given list
/// i.e. the happy path
fn extractor_add_link_to_set_of_links_happy_path() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "admin";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 1);
assert!(links.contains("https://localhost/admin"));
}
#[test]
/// test that an invalid path fragment doesn't add anything to the set of links
fn extractor_add_link_to_set_of_links_with_non_base_url() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "\\\\\\\\";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 0);
assert!(links.is_empty());
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// use make_request to generate a Response, and use the Response to test get_links;
/// the response will contain an absolute path to a domain that is not part of the scanned
/// domain; expect an empty set returned
async fn extractor_get_links_with_absolute_url_that_differs_from_target_domain(
) -> Result<(), Box<dyn std::error::Error>> {
let srv = MockServer::start();
let mock = srv.mock(|when, then|{
when.method(GET)
.path("/some-path");
then.status(200)
.body("\"http://defintely.not.a.thing.probably.com/homepage/assets/img/icons/handshake.svg\"");
});
let client = Client::new();
let url = Url::parse(&srv.url("/some-path")).unwrap();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
let response = make_request(&client, &url, tx.clone()).await.unwrap();
let ferox_response = FeroxResponse::from(response, true).await;
let links = get_links(&ferox_response, tx).await;
assert!(links.is_empty());
assert_eq!(mock.hits(), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// test that /robots.txt is correctly requested given a base url (happy path)
async fn request_robots_txt_with_and_without_proxy() {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET).path("/robots.txt");
then.status(200).body("this is a test");
});
let mut config = Configuration::default();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
request_robots_txt(&srv.url("/api/users/stuff/things"), &config, tx.clone()).await;
// note: the proxy doesn't actually do anything other than hit a different code branch
// in this unit test; it would however have an effect on an integration test
config.proxy = srv.url("/ima-proxy");
request_robots_txt(&srv.url("/api/different/path"), &config, tx).await;
assert_eq!(mock.hits(), 2);
}
}
extractor.rs
use crate::{
client,
config::{Configuration, CONFIGURATION},
scanner::SCANNED_URLS,
statistics::{
StatCommand::{self, UpdateUsizeField},
StatField::{LinksExtracted, TotalExpected},
},
utils::{format_url, make_request},
FeroxResponse,
};
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::Url;
use std::collections::HashSet;
use tokio::sync::mpsc::UnboundedSender;
/// Regular expression used in [LinkFinder](https://github.com/GerbenJavado/LinkFinder)
///
/// Incorporates change from this [Pull Request](https://github.com/GerbenJavado/LinkFinder/pull/66/files)
const LINKFINDER_REGEX: &str = r#"(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-.]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')"#;
/// Regular expression to pull url paths from robots.txt
///
/// ref: https://developers.google.com/search/reference/robots_txt
const ROBOTS_TXT_REGEX: &str =
r#"(?m)^ *(Allow|Disallow): *(?P<url_path>[a-zA-Z0-9._/?#@!&'()+,;%=-]+?)$"#; // multi-line (?m)
lazy_static! {
/// `LINKFINDER_REGEX` as a regex::Regex type
static ref LINKS_REGEX: Regex = Regex::new(LINKFINDER_REGEX).unwrap();
/// `ROBOTS_TXT_REGEX` as a regex::Regex type
static ref ROBOTS_REGEX: Regex = Regex::new(ROBOTS_TXT_REGEX).unwrap();
}
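// Illustrative input for ROBOTS_TXT_REGEX (a sketch, not part of the original
// file): given a robots.txt body of
//
//     User-agent: *
//     Disallow: /admin
//     Allow: /public/index.html
//
// the `url_path` capture yields "/admin" and "/public/index.html"; the
// User-agent line matches neither keyword and is skipped.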
/// Iterate over a given path, return a list of every sub-path found
///
/// example: `path` contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// the following fragments would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn get_sub_paths_from_path(path: &str) -> Vec<String> {
log::trace!("enter: get_sub_paths_from_path({})", path);
let mut paths = vec![];
// filter out any empty strings caused by .split
let mut parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let length = parts.len();
for i in 0..length {
// iterate over all parts of the path
if parts.is_empty() {
// pop left us with an empty vector, we're done
break;
}
let mut possible_path = parts.join("/");
if possible_path.is_empty() {
// .join can result in an empty string, which we don't need; ignore it
continue;
}
if i > 0 {
// after the first iteration, at least one trailing component has been
// popped, so the joined value is a parent folder rather than the file
// ex: /buried/misc/stupidfile.php yields /buried/misc/ and /buried/
// mark those folders with a trailing slash
possible_path = format!("{}/", possible_path);
}
paths.push(possible_path); // good sub-path found
parts.pop(); // use .pop() to remove the last part of the path and continue iteration
}
log::trace!("exit: get_sub_paths_from_path -> {:?}", paths);
paths
}
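// Usage sketch (hypothetical input, mirrors the doc comment above):
//
//     let subs = get_sub_paths_from_path("homepage/assets/style.css");
//     // subs: ["homepage/assets/style.css", "homepage/assets/", "homepage/"]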
/// simple helper to stay DRY; tries to join a url + fragment and add it to the `links` HashSet
fn add_link_to_set_of_links(link: &str, url: &Url, links: &mut HashSet<String>) {
log::trace!(
"enter: add_link_to_set_of_links({}, {}, {:?})",
link,
url.to_string(),
links
);
match url.join(&link) {
Ok(new_url) => {
links.insert(new_url.to_string());
}
Err(e) => {
log::error!("Could not join given url to the base url: {}", e);
}
}
log::trace!("exit: add_link_to_set_of_links");
}
/// Given a `reqwest::Response`, perform the following actions
/// - parse the response's text for links using the linkfinder regex
/// - for every link found take its url path and parse each sub-path
/// - example: Response contains a link fragment `homepage/assets/img/icons/handshake.svg`
/// with a base url of http://localhost, the following urls would be returned:
/// - homepage/assets/img/icons/handshake.svg
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
pub async fn get_links(
response: &FeroxResponse,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: get_links({}, {:?})",
response.url().as_str(),
tx_stats
);
let mut links = HashSet::<String>::new();
let body = response.text();
for capture in LINKS_REGEX.captures_iter(&body) {
// remove single & double quotes from both ends of the capture
// capture[0] is the entire match, additional capture groups start at [1]
let link = capture[0].trim_matches(|c| c == '\'' || c == '"');
match Url::parse(link) {
Ok(absolute) => {
if absolute.domain() != response.url().domain()
|| absolute.host() != response.url().host()
{
// domains/ips are not the same, don't scan things that aren't part of the original
// target url
continue;
}
add_all_sub_paths(absolute.path(), &response, &mut links);
}
Err(e) => {
// this is the expected error that happens when we try to parse a url fragment
// ex: Url::parse("/login") -> Err("relative URL without a base")
// while this is technically an error, these are good results for us
if e.to_string().contains("relative URL without a base") {
add_all_sub_paths(link, &response, &mut links);
} else {
// unexpected error has occurred
log::error!("Could not parse given url: {}", e);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: get_links -> {:?}", links);
links
}
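// For reference, the error branch above leans on this `url` crate behavior
// (illustrative):
//
//     let err = Url::parse("/login").unwrap_err();
//     assert!(err.to_string().contains("relative URL without a base"));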
/// take a url fragment like homepage/assets/img/icons/handshake.svg and
/// incrementally add
/// - homepage/assets/img/icons/
/// - homepage/assets/img/
/// - homepage/assets/
/// - homepage/
fn add_all_sub_paths(url_path: &str, response: &FeroxResponse, mut links: &mut HashSet<String>) {
log::trace!(
"enter: add_all_sub_paths({}, {}, {:?})",
url_path,
response,
links
);
for sub_path in get_sub_paths_from_path(url_path) {
log::debug!("Adding {} to {:?}", sub_path, links);
add_link_to_set_of_links(&sub_path, &response.url(), &mut links);
}
log::trace!("exit: add_all_sub_paths");
}
/// Wrapper around link extraction logic
/// currently used in two places:
/// - links from response bodys
/// - links from robots.txt responses
///
/// general steps taken:
/// - create a new Url object based on cli options/args
/// - check if the new Url has already been seen/scanned -> None
/// - make a request to the new Url? -> Some(response) : None
pub async fn request_feroxresponse_from_new_link(
url: &str,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: request_feroxresponse_from_new_link({}, {:?})",
url,
tx_stats
);
// create a url based on the given command line options, return None on error
let new_url = match format_url(
&url,
&"",
CONFIGURATION.add_slash,
&CONFIGURATION.queries,
None,
tx_stats.clone(),
) {
Ok(url) => url,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
if SCANNED_URLS.get_scan_by_url(&new_url.to_string()).is_some() {
// we've seen the url before and don't need to scan again
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
// make the request and store the response
let new_response = match make_request(&CONFIGURATION.client, &new_url, tx_stats).await {
Ok(resp) => resp,
Err(_) => {
log::trace!("exit: request_feroxresponse_from_new_link -> None");
return None;
}
};
let new_ferox_response = FeroxResponse::from(new_response, true).await;
log::trace!(
"exit: request_feroxresponse_from_new_link -> {:?}",
new_ferox_response
);
Some(new_ferox_response)
}
/// helper function that simply requests /robots.txt on the given url's base url
///
/// example:
/// http://localhost/api/users -> http://localhost/robots.txt
///
/// The length of the given path has no effect on what's requested; it's always
/// base url + /robots.txt
pub async fn request_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> Option<FeroxResponse> {
log::trace!(
"enter: request_robots_txt({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
// more often than not, domain/robots.txt will redirect to www.domain/robots.txt or something
// similar; to account for that, create a client that will follow redirects, regardless of
// what the user specified for the scanning client. Other than redirects, it will respect
// all other user specified settings
let follow_redirects = true;
let proxy = if config.proxy.is_empty() {
None
} else {
Some(config.proxy.as_str())
};
let client = client::initialize(
config.timeout,
&config.user_agent,
follow_redirects,
config.insecure,
&config.headers,
proxy,
);
if let Ok(mut url) = Url::parse(base_url) {
url.set_path("/robots.txt"); // overwrite existing path with /robots.txt
if let Ok(response) = make_request(&client, &url, tx_stats).await {
let ferox_response = FeroxResponse::from(response, true).await;
log::trace!("exit: request_robots_txt -> {}", ferox_response);
return Some(ferox_response);
}
}
None
}
/// Entry point to perform link extraction from robots.txt
///
/// `base_url` can have paths and subpaths, however robots.txt will be requested from the
/// root of the url
/// given the url:
/// http://localhost/stuff/things
/// this function requests:
/// http://localhost/robots.txt
pub async fn extract_robots_txt(
base_url: &str,
config: &Configuration,
tx_stats: UnboundedSender<StatCommand>,
) -> HashSet<String> {
log::trace!(
"enter: extract_robots_txt({}, CONFIGURATION, {:?})",
base_url,
tx_stats
);
let mut links = HashSet::new();
if let Some(response) = request_robots_txt(&base_url, &config, tx_stats.clone()).await {
for capture in ROBOTS_REGEX.captures_iter(response.text.as_str()) {
if let Some(new_path) = capture.name("url_path") {
if let Ok(mut new_url) = Url::parse(base_url) {
new_url.set_path(new_path.as_str());
add_all_sub_paths(new_url.path(), &response, &mut links);
}
}
}
}
let multiplier = CONFIGURATION.extensions.len().max(1);
update_stat!(tx_stats, UpdateUsizeField(LinksExtracted, links.len()));
update_stat!(
tx_stats,
UpdateUsizeField(TotalExpected, links.len() * multiplier)
);
log::trace!("exit: extract_robots_txt -> {:?}", links);
links
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::make_request;
use crate::FeroxChannel;
use httpmock::Method::GET;
use httpmock::MockServer;
use reqwest::Client;
use tokio::sync::mpsc;
#[test]
/// extract sub paths from the given url fragment; expect 4 sub paths and that all are
/// in the expected array
fn extractor_get_sub_paths_from_path_with_multiple_paths() {
let path = "homepage/assets/img/icons/handshake.svg";
let paths = get_sub_paths_from_path(&path);
let expected = vec![
"homepage/",
"homepage/assets/",
"homepage/assets/img/",
"homepage/assets/img/icons/",
"homepage/assets/img/icons/handshake.svg",
];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 2 sub paths and that all are
/// in the expected array. the fragment is wrapped in slashes to ensure no empty strings are
/// returned
fn extractor_get_sub_paths_from_path_with_enclosing_slashes() {
let path = "/homepage/assets/";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage/", "homepage/assets"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, no forward slashes are
/// included
fn extractor_get_sub_paths_from_path_with_only_a_word() {
let path = "homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
#[test]
/// extract sub paths from the given url fragment; expect 1 sub path, forward slash removed
fn extractor_get_sub_paths_from_path_with_an_absolute_word() {
let path = "/homepage";
let paths = get_sub_paths_from_path(&path);
let expected = vec!["homepage"];
assert_eq!(paths.len(), expected.len());
for expected_path in expected {
assert_eq!(paths.contains(&expected_path.to_string()), true);
}
}
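#[test]
/// hypothetical extra check, not in the original suite: an empty fragment
/// yields no sub paths (split + filter leaves nothing to join)
fn extractor_get_sub_paths_from_path_with_empty_path() {
let path = "";
let paths = get_sub_paths_from_path(&path);
assert!(paths.is_empty());
}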
#[test]
/// test that a full url and fragment are joined correctly, then added to the given list
/// i.e. the happy path
fn extractor_add_link_to_set_of_links_happy_path() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "admin";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 1);
assert!(links.contains("https://localhost/admin"));
}
#[test]
/// test that an invalid path fragment doesn't add anything to the set of links
fn extractor_add_link_to_set_of_links_with_non_base_url() {
let url = Url::parse("https://localhost").unwrap();
let mut links = HashSet::<String>::new();
let link = "\\\\\\\\";
assert_eq!(links.len(), 0);
add_link_to_set_of_links(link, &url, &mut links);
assert_eq!(links.len(), 0);
assert!(links.is_empty());
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// use make_request to generate a Response, and use the Response to test get_links;
/// the response will contain an absolute path to a domain that is not part of the scanned
/// domain; expect an empty set returned
async fn extractor_get_links_with_absolute_url_that_differs_from_target_domain(
) -> Result<(), Box<dyn std::error::Error>> {
let srv = MockServer::start();
let mock = srv.mock(|when, then|{
when.method(GET)
.path("/some-path");
then.status(200)
.body("\"http://defintely.not.a.thing.probably.com/homepage/assets/img/icons/handshake.svg\"");
});
let client = Client::new();
let url = Url::parse(&srv.url("/some-path")).unwrap();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
let response = make_request(&client, &url, tx.clone()).await.unwrap();
let ferox_response = FeroxResponse::from(response, true).await;
let links = get_links(&ferox_response, tx).await;
assert!(links.is_empty());
assert_eq!(mock.hits(), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// test that /robots.txt is correctly requested given a base url (happy path)
async fn request_robots_txt_with_and_without_proxy() {
let srv = MockServer::start();
let mock = srv.mock(|when, then| {
when.method(GET).path("/robots.txt");
then.status(200).body("this is a test");
});
let mut config = Configuration::default();
let (tx, _): FeroxChannel<StatCommand> = mpsc::unbounded_channel();
request_robots_txt(&srv.url("/api/users/stuff/things"), &config, tx.clone()).await;
// note: the proxy doesn't actually do anything other than hit a different code branch
// in this unit test; it would however have an effect on an integration test
config.proxy = srv.url("/ima-proxy");
request_robots_txt(&srv.url("/api/different/path"), &config, tx).await;
assert_eq!(mock.hits(), 2);
}
}
keyboard.rs
use crate::input::device::{Device, DeviceType};
use crate::input::event_filter::{EventFilter, EventFilterManager};
use crate::input::events::{InputEvent, KeyboardEvent};
use crate::{config::ConfigManager, input::seat::SeatManager};
use log::debug;
use serde::{Deserialize, Serialize};
use std::cell::RefCell;
use std::ops::Deref;
use std::pin::Pin;
use std::rc::{Rc, Weak};
use wlroots_sys::*;
use xkbcommon::xkb;
#[cfg(not(test))]
use xkbcommon::xkb::ffi::xkb_state_ref;
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct RepeatRate(u32);
impl Default for RepeatRate {
fn default() -> Self {
RepeatRate(33)
}
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct RepeatDelay(u32);
impl Default for RepeatDelay {
fn default() -> Self {
RepeatDelay(500)
}
}
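// Units are an assumption based on how these values reach
// `wlr_keyboard_set_repeat_info` below: RepeatRate is repeats per second and
// RepeatDelay is milliseconds of hold time, so the defaults mean "repeat 33
// times per second after half a second of holding a key".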
#[derive(Default, Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct KeyboardConfig {
pub xkb_rules: String,
pub xkb_model: String,
pub xkb_layout: String,
pub xkb_variant: String,
pub xkb_options: Option<String>,
pub repeat_rate: RepeatRate,
pub repeat_delay: RepeatDelay,
}
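// Deserialization sketch (hypothetical TOML source; `serde(default)` lets any
// field be omitted):
//
//     let cfg: KeyboardConfig = toml::from_str(
//         r#"
//         xkb_layout = "us"
//         xkb_variant = "dvorak"
//         "#,
//     )
//     .unwrap();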
pub struct Keyboard {
seat_manager: Rc<SeatManager>,
event_filter_manager: Rc<EventFilterManager>,
device: Rc<Device>,
keyboard: *mut wlr_keyboard,
xkb_state: RefCell<xkb::State>,
event_manager: RefCell<Option<Pin<Box<KeyboardEventManager>>>>,
}
impl Keyboard {
fn init(
config_manager: Rc<ConfigManager>,
seat_manager: Rc<SeatManager>,
event_filter_manager: Rc<EventFilterManager>,
device: Rc<Device>,
) -> Rc<Keyboard> {
debug!("Keyboard::init: {}", device.name());
let keyboard_ptr = match device.device_type() {
DeviceType::Keyboard(keyboard_ptr) => keyboard_ptr,
_ => panic!("Keyboard::init expects a keyboard device"),
};
let config = &config_manager.config().keyboard;
set_keymap_from_config(keyboard_ptr, config);
let keyboard = Rc::new(Keyboard {
seat_manager,
event_filter_manager,
device: device.clone(),
keyboard: keyboard_ptr,
xkb_state: RefCell::new(unsafe {
xkb::State::from_raw_ptr(xkb_state_ref((*keyboard_ptr).xkb_state))
}),
event_manager: RefCell::new(None),
});
let subscription =
config_manager
.on_config_changed()
.subscribe(listener!(keyboard => move |config| {
set_keymap_from_config(keyboard.raw_ptr(), &config.keyboard);
*keyboard.xkb_state.borrow_mut() = unsafe {
xkb::State::from_raw_ptr(xkb_state_ref((*keyboard_ptr).xkb_state))
};
}));
device.on_destroy.then(listener!(config_manager => move || {
config_manager.on_config_changed().unsubscribe(subscription);
}));
let mut event_manager = KeyboardEventManager::new(Rc::downgrade(&keyboard));
unsafe {
event_manager.modifiers(&mut (*keyboard_ptr).events.modifiers);
event_manager.key(&mut (*keyboard_ptr).events.key);
}
*keyboard.event_manager.borrow_mut() = Some(event_manager);
keyboard
}
pub fn raw_ptr(&self) -> *mut wlr_keyboard {
self.keyboard
}
pub fn device(&self) -> Rc<Device> {
self.device.clone()
}
pub fn xkb_state(&self) -> xkb::State {
self.xkb_state.borrow().clone()
}
}
fn set_keymap_from_config(keyboard_ptr: *mut wlr_keyboard, config: &KeyboardConfig) {
// We need to prepare an XKB keymap and assign it to the keyboard.
let context = xkb::Context::new(xkb::CONTEXT_NO_FLAGS);
let keymap = xkb::Keymap::new_from_names(
&context,
&config.xkb_rules,
&config.xkb_model,
&config.xkb_layout,
&config.xkb_variant,
config.xkb_options.clone(),
xkb::KEYMAP_COMPILE_NO_FLAGS,
)
.expect("xkb::Keymap could not be created");
unsafe {
wlr_keyboard_set_keymap(keyboard_ptr, keymap.get_raw_ptr());
wlr_keyboard_set_repeat_info(
keyboard_ptr,
config.repeat_rate.0 as i32,
config.repeat_delay.0 as i32,
);
}
}
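// Note (hedged): with the all-empty defaults in `KeyboardConfig`,
// `Keymap::new_from_names` falls back to xkbcommon's system defaults
// (typically rules "evdev" and layout "us") rather than failing.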
pub(crate) trait KeyboardEventHandler {
fn modifiers(&self);
fn key(&self, event: *const wlr_event_keyboard_key);
}
impl KeyboardEventHandler for Keyboard {
fn modifiers(&self) {
unsafe {
// A seat can only have one keyboard, but this is a limitation of the
// Wayland protocol - not wlroots. We assign all connected keyboards to the
// same seat. You can swap out the underlying wlr_keyboard like this and
// wlr_seat handles this transparently.
wlr_seat_set_keyboard(self.seat_manager.raw_seat(), self.device.raw_ptr());
// Send modifiers to the client.
wlr_seat_keyboard_notify_modifiers(
self.seat_manager.raw_seat(),
&mut (*self.keyboard).modifiers,
);
}
}
fn key(&self, event: *const wlr_event_keyboard_key) {
let event = unsafe { KeyboardEvent::from_ptr(self, event) };
let handled = self.event_filter_manager.handle_keyboard_event(&event);
if !handled {
unsafe {
// Otherwise, we pass it along to the client.
wlr_seat_set_keyboard(self.seat_manager.raw_seat(), self.device.raw_ptr());
wlr_seat_keyboard_notify_key(
self.seat_manager.raw_seat(),
event.time_msec(),
event.libinput_keycode(),
event.raw_state(),
);
}
}
}
}
wayland_listener!(
KeyboardEventManager,
Weak<Keyboard>,
[
modifiers => modifiers_func: |this: &mut KeyboardEventManager, _data: *mut libc::c_void,| unsafe {
if let Some(handler) = this.data.upgrade() {
handler.modifiers();
}
};
key => key_func: |this: &mut KeyboardEventManager, data: *mut libc::c_void,| unsafe {
if let Some(handler) = this.data.upgrade() {
handler.key(data as _);
}
};
]
);
pub struct KeyboardManager {
config_manager: Rc<ConfigManager>,
seat_manager: Rc<SeatManager>,
event_filter_manager: Rc<EventFilterManager>,
keyboards: RefCell<Vec<Rc<Keyboard>>>,
}
impl KeyboardManager {
pub(crate) fn init(
config_manager: Rc<ConfigManager>,
seat_manager: Rc<SeatManager>,
event_filter_manager: Rc<EventFilterManager>,
) -> Rc<KeyboardManager> {
let keyboard_manager = Rc::new(KeyboardManager {
config_manager,
seat_manager: seat_manager.clone(),
event_filter_manager,
keyboards: RefCell::new(vec![]),
});
seat_manager
.on_new_device
.subscribe(listener!(keyboard_manager => move |device| {
if let DeviceType::Keyboard(_) = device.device_type() {
device.on_destroy.then(listener!(device, keyboard_manager => move || {
keyboard_manager
.keyboards
.borrow_mut()
.retain(|keyboard| keyboard.device.deref() != device.deref());
keyboard_manager
.seat_manager
.set_has_any_keyboard(keyboard_manager.has_keyboard());
}));
unsafe {
wlr_seat_set_keyboard(keyboard_manager.seat_manager.raw_seat(), device.raw_ptr());
}
let keyboard = Keyboard::init(
keyboard_manager.config_manager.clone(),
keyboard_manager.seat_manager.clone(),
keyboard_manager.event_filter_manager.clone(),
device.clone(),
);
keyboard_manager.keyboards.borrow_mut().push(keyboard);
keyboard_manager.seat_manager.set_has_any_keyboard(true);
}
}));
keyboard_manager
}
pub fn has_keyboard(&self) -> bool {
!self.keyboards.borrow().is_empty()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_util::*;
use std::ptr;
use std::rc::Rc;
#[test]
fn it_drops_and_cleans_up_on_destroy() {
let config_manager = Rc::new(ConfigManager::default());
let seat_manager = SeatManager::mock(ptr::null_mut(), ptr::null_mut());
let event_filter_manager = Rc::new(EventFilterManager::new());
let keyboard_manager = Rc::new(KeyboardManager::init(
config_manager,
seat_manager.clone(),
event_filter_manager,
));
let mut raw_keyboard = wlr_keyboard {
impl_: ptr::null(),
group: ptr::null_mut(),
keymap_string: ptr::null_mut(),
keymap_size: 0,
keymap: ptr::null_mut(),
xkb_state: ptr::null_mut(),
led_indexes: [0; 3],
mod_indexes: [0; 8],
keycodes: [0; 32],
num_keycodes: 0,
modifiers: wlr_keyboard_modifiers {
depressed: 0,
latched: 0,
locked: 0,
group: 0,
},
repeat_info: wlr_keyboard__bindgen_ty_1 { rate: 0, delay: 0 },
events: wlr_keyboard__bindgen_ty_2 {
key: new_wl_signal(),
modifiers: new_wl_signal(),
keymap: new_wl_signal(),
repeat_info: new_wl_signal(),
destroy: new_wl_signal(),
},
data: ptr::null_mut(),
};
let mut device = wlr_input_device {
impl_: ptr::null(),
type_: wlr_input_device_type_WLR_INPUT_DEVICE_KEYBOARD,
vendor: 0,
product: 0,
name: ptr::null_mut(),
width_mm: 0.0,
height_mm: 0.0,
output_name: ptr::null_mut(),
__bindgen_anon_1: wlr_input_device__bindgen_ty_1 {
keyboard: &mut raw_keyboard,
},
events: wlr_input_device__bindgen_ty_2 {
destroy: new_wl_signal(),
},
data: ptr::null_mut(),
link: new_wl_list(),
};
let key_signal = WlSignal::from_ptr(&mut raw_keyboard.events.key);
let modifiers_signal = WlSignal::from_ptr(&mut raw_keyboard.events.modifiers);
let keymap_signal = WlSignal::from_ptr(&mut raw_keyboard.events.keymap);
let repeat_info_signal = WlSignal::from_ptr(&mut raw_keyboard.events.repeat_info);
let destroy_signal = WlSignal::from_ptr(&mut device.events.destroy);
let device = Device::init(&mut device);
let weak_device = Rc::downgrade(&device);
seat_manager.on_new_device.fire(device);
let keyboard = keyboard_manager.keyboards.borrow().first().unwrap().clone();
let weak_keyboard = Rc::downgrade(&keyboard);
drop(keyboard);
assert!(weak_device.upgrade().is_some());
assert!(weak_keyboard.upgrade().is_some());
assert!(key_signal.listener_count() == 1);
assert!(modifiers_signal.listener_count() == 1);
assert!(destroy_signal.listener_count() == 1);
assert!(keyboard_manager.has_keyboard());
destroy_signal.emit();
assert!(key_signal.listener_count() == 0);
assert!(modifiers_signal.listener_count() == 0);
assert!(keymap_signal.listener_count() == 0);
assert!(repeat_info_signal.listener_count() == 0);
assert!(destroy_signal.listener_count() == 0);
assert!(!keyboard_manager.has_keyboard());
assert!(weak_keyboard.upgrade().is_none());
assert!(weak_device.upgrade().is_none());
}
}
#[cfg(test)]
use xkbcommon::xkb::ffi::{xkb_keymap, xkb_state};
#[cfg(test)]
unsafe fn wlr_seat_set_keyboard(_: *mut wlr_seat, _: *mut wlr_input_device) {}
#[cfg(test)]
unsafe fn wlr_keyboard_set_keymap(_: *mut wlr_keyboard, _: *mut xkb_keymap) {}
#[cfg(test)]
unsafe fn wlr_keyboard_set_repeat_info(_: *mut wlr_keyboard, _: i32, _: i32) {}
#[cfg(test)]
unsafe fn xkb_state_ref(ptr: *mut xkb_state) -> *mut xkb_state {
ptr
}
quic.rs
use {
crossbeam_channel::Sender,
futures_util::stream::StreamExt,
pem::Pem,
pkcs8::{der::Document, AlgorithmIdentifier, ObjectIdentifier},
quinn::{Endpoint, EndpointConfig, ServerConfig},
rcgen::{CertificateParams, DistinguishedName, DnType, SanType},
solana_perf::packet::PacketBatch,
solana_sdk::{
packet::{Packet, PACKET_DATA_SIZE},
signature::Keypair,
},
std::{
error::Error,
net::{IpAddr, SocketAddr, UdpSocket},
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread,
time::Duration,
},
tokio::{
runtime::{Builder, Runtime},
time::timeout,
},
};
/// Returns default server configuration along with its PEM certificate chain.
#[allow(clippy::field_reassign_with_default)] // https://github.com/rust-lang/rust-clippy/issues/6527
fn configure_server(
identity_keypair: &Keypair,
gossip_host: IpAddr,
) -> Result<(ServerConfig, String), QuicServerError> {
let (cert_chain, priv_key) =
new_cert(identity_keypair, gossip_host).map_err(|_e| QuicServerError::ConfigureFailed)?;
let cert_chain_pem_parts: Vec<Pem> = cert_chain
.iter()
.map(|cert| Pem {
tag: "CERTIFICATE".to_string(),
contents: cert.0.clone(),
})
.collect();
let cert_chain_pem = pem::encode_many(&cert_chain_pem_parts);
let mut server_config = ServerConfig::with_single_cert(cert_chain, priv_key)
.map_err(|_e| QuicServerError::ConfigureFailed)?;
let config = Arc::get_mut(&mut server_config.transport).unwrap();
const MAX_CONCURRENT_UNI_STREAMS: u32 = 1;
config.max_concurrent_uni_streams(MAX_CONCURRENT_UNI_STREAMS.into());
config.stream_receive_window((PACKET_DATA_SIZE as u32).into());
config.receive_window((PACKET_DATA_SIZE as u32 * MAX_CONCURRENT_UNI_STREAMS).into());
// disable bidi & datagrams
const MAX_CONCURRENT_BIDI_STREAMS: u32 = 0;
config.max_concurrent_bidi_streams(MAX_CONCURRENT_BIDI_STREAMS.into());
config.datagram_receive_buffer_size(None);
Ok((server_config, cert_chain_pem))
}
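// Sizing note (illustrative, from the constants above): with
// MAX_CONCURRENT_UNI_STREAMS = 1, receive_window works out to exactly one
// stream_receive_window of PACKET_DATA_SIZE bytes, so at most one packet's
// worth of data can be buffered per connection at a time.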
fn new_cert(
identity_keypair: &Keypair,
san: IpAddr,
) -> Result<(Vec<rustls::Certificate>, rustls::PrivateKey), Box<dyn Error>> {
// Generate a self-signed cert from validator identity key
let cert_params = new_cert_params(identity_keypair, san);
let cert = rcgen::Certificate::from_params(cert_params)?;
let cert_der = cert.serialize_der().unwrap();
let priv_key = cert.serialize_private_key_der();
let priv_key = rustls::PrivateKey(priv_key);
let cert_chain = vec![rustls::Certificate(cert_der)];
Ok((cert_chain, priv_key))
}
fn convert_to_rcgen_keypair(identity_keypair: &Keypair) -> rcgen::KeyPair {
// from https://datatracker.ietf.org/doc/html/rfc8410#section-3
const ED25519_IDENTIFIER: [u32; 4] = [1, 3, 101, 112];
let mut private_key = Vec::<u8>::with_capacity(34);
private_key.extend_from_slice(&[0x04, 0x20]); // ASN.1 OCTET STRING tag (0x04), length 32 (0x20)
private_key.extend_from_slice(identity_keypair.secret().as_bytes());
let key_pkcs8 = pkcs8::PrivateKeyInfo {
algorithm: AlgorithmIdentifier {
oid: ObjectIdentifier::from_arcs(&ED25519_IDENTIFIER).unwrap(),
parameters: None,
},
private_key: &private_key,
public_key: None,
};
let key_pkcs8_der = key_pkcs8
.to_der()
.expect("Failed to convert keypair to DER")
.to_der();
// Parse private key into rcgen::KeyPair struct.
rcgen::KeyPair::from_der(&key_pkcs8_der).expect("Failed to parse keypair from DER")
}
fn new_cert_params(identity_keypair: &Keypair, san: IpAddr) -> CertificateParams {
// TODO(terorie): Is it safe to sign the TLS cert with the identity private key?
// Unfortunately, rcgen does not accept a "raw" Ed25519 key.
// We have to convert it to DER and pass it to the library.
// Convert private key into PKCS#8 v1 object.
// RFC 8410, Section 7: Private Key Format
// https://datatracker.ietf.org/doc/html/rfc8410#section-
let keypair = convert_to_rcgen_keypair(identity_keypair);
let mut cert_params = CertificateParams::default();
cert_params.subject_alt_names = vec![SanType::IpAddress(san)];
cert_params.alg = &rcgen::PKCS_ED25519;
cert_params.key_pair = Some(keypair);
cert_params.distinguished_name = DistinguishedName::new();
cert_params
.distinguished_name
.push(DnType::CommonName, "Solana node");
cert_params
}
pub fn rt() -> Runtime {
Builder::new_current_thread().enable_all().build().unwrap()
}
#[derive(thiserror::Error, Debug)]
pub enum QuicServerError {
#[error("Server configure failed")]
ConfigureFailed,
#[error("Endpoint creation failed")]
EndpointFailed,
}
// Return true if the server should drop the stream
fn handle_chunk(
chunk: &Result<Option<quinn::Chunk>, quinn::ReadError>,
maybe_batch: &mut Option<PacketBatch>,
remote_addr: &SocketAddr,
packet_sender: &Sender<PacketBatch>,
) -> bool {
match chunk {
Ok(maybe_chunk) => {
if let Some(chunk) = maybe_chunk {
trace!("got chunk: {:?}", chunk);
let chunk_len = chunk.bytes.len() as u64;
// shouldn't happen, but sanity check the size and offsets
if chunk.offset > PACKET_DATA_SIZE as u64 || chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
if chunk.offset + chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
// chunk looks valid
if maybe_batch.is_none() {
let mut batch = PacketBatch::with_capacity(1);
let mut packet = Packet::default();
packet.meta.set_addr(remote_addr);
batch.packets.push(packet);
*maybe_batch = Some(batch);
}
if let Some(batch) = maybe_batch.as_mut() {
let end = chunk.offset as usize + chunk.bytes.len();
batch.packets[0].data[chunk.offset as usize..end].copy_from_slice(&chunk.bytes);
batch.packets[0].meta.size = std::cmp::max(batch.packets[0].meta.size, end);
}
} else {
trace!("chunk is none");
// done receiving chunks
if let Some(batch) = maybe_batch.take() {
let len = batch.packets[0].meta.size;
if let Err(e) = packet_sender.send(batch) {
info!("send error: {}", e);
} else {
trace!("sent {} byte packet", len);
}
}
return true;
}
}
Err(e) => {
debug!("Received stream error: {:?}", e);
return true;
}
}
false
}
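// Offset arithmetic sketch for the copy above (illustrative): a 1200-byte
// payload arriving as chunks (offset 0, len 800) then (offset 800, len 400)
// fills data[0..800] and data[800..1200], and meta.size ends up as
// max(800, 1200) = 1200, which stays within PACKET_DATA_SIZE.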
pub fn spawn_server(
sock: UdpSocket,
keypair: &Keypair,
gossip_host: IpAddr,
packet_sender: Sender<PacketBatch>,
exit: Arc<AtomicBool>,
) -> Result<thread::JoinHandle<()>, QuicServerError> {
let (config, _cert) = configure_server(keypair, gossip_host)?;
let runtime = rt();
let (_, mut incoming) = {
let _guard = runtime.enter();
Endpoint::new(EndpointConfig::default(), Some(config), sock)
.map_err(|_e| QuicServerError::EndpointFailed)?
};
let handle = thread::spawn(move || {
let handle = runtime.spawn(async move {
while !exit.load(Ordering::Relaxed) {
const WAIT_FOR_CONNECTION_TIMEOUT_MS: u64 = 1000;
let timeout_connection = timeout(
Duration::from_millis(WAIT_FOR_CONNECTION_TIMEOUT_MS),
incoming.next(),
)
.await;
if let Ok(Some(connection)) = timeout_connection {
if let Ok(new_connection) = connection.await {
let exit = exit.clone();
let quinn::NewConnection {
connection,
mut uni_streams,
..
} = new_connection;
let remote_addr = connection.remote_address();
let packet_sender = packet_sender.clone();
tokio::spawn(async move {
debug!("new connection {}", remote_addr);
while let Some(Ok(mut stream)) = uni_streams.next().await {
let mut maybe_batch = None;
while !exit.load(Ordering::Relaxed) {
if handle_chunk(
&stream.read_chunk(PACKET_DATA_SIZE, false).await,
&mut maybe_batch,
&remote_addr,
&packet_sender,
) {
break;
}
}
}
});
}
}
}
});
if let Err(e) = runtime.block_on(handle) {
warn!("error from runtime.block_on: {:?}", e);
}
});
Ok(handle)
}
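// Usage sketch (hypothetical wiring, mirroring the tests below):
//
//     let sock = UdpSocket::bind("127.0.0.1:0")?;
//     let (sender, receiver) = crossbeam_channel::unbounded();
//     let exit = Arc::new(AtomicBool::new(false));
//     let handle = spawn_server(sock, &keypair, "127.0.0.1".parse()?, sender, exit.clone())?;
//     // ...consume PacketBatch values from `receiver`...
//     exit.store(true, Ordering::Relaxed);
//     handle.join().unwrap();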
#[cfg(test)]
mod test {
use {
super::*,
crossbeam_channel::unbounded,
quinn::{ClientConfig, NewConnection},
std::{net::SocketAddr, time::Instant},
};
struct SkipServerVerification;
impl SkipServerVerification {
fn new() -> Arc<Self> {
Arc::new(Self)
}
}
impl rustls::client::ServerCertVerifier for SkipServerVerification {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_server_name: &rustls::ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: std::time::SystemTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
Ok(rustls::client::ServerCertVerified::assertion())
}
}
pub fn get_client_config() -> quinn::ClientConfig {
let crypto = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(SkipServerVerification::new())
.with_no_client_auth();
ClientConfig::new(Arc::new(crypto))
}
#[test]
fn test_quic_server_exit() {
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, _receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
fn make_client_endpoint(runtime: &Runtime, addr: &SocketAddr) -> NewConnection {
let client_socket = UdpSocket::bind("127.0.0.1:0").unwrap();
let mut endpoint = quinn::Endpoint::new(EndpointConfig::default(), None, client_socket)
.unwrap()
.0;
endpoint.set_default_client_config(get_client_config());
runtime
.block_on(endpoint.connect(*addr, "localhost").unwrap())
.unwrap()
}
#[test]
fn test_quic_server_multiple_streams() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
let conn2 = Arc::new(make_client_endpoint(&runtime, &server_address));
let mut num_expected_packets = 0;
for i in 0..10 {
info!("sending: {}", i);
let c1 = conn1.clone();
let c2 = conn2.clone();
let handle = runtime.spawn(async move {
let mut s1 = c1.connection.open_uni().await.unwrap();
let mut s2 = c2.connection.open_uni().await.unwrap();
s1.write_all(&[0u8]).await.unwrap();
s1.finish().await.unwrap();
s2.write_all(&[0u8]).await.unwrap();
s2.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
num_expected_packets += 2;
thread::sleep(Duration::from_millis(200));
}
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 10 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets == num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, 1);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
#[test]
fn test_quic_server_multiple_writes() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
// Send a full size packet with single byte writes.
let num_bytes = PACKET_DATA_SIZE;
let num_expected_packets = 1;
let handle = runtime.spawn(async move {
let mut s1 = conn1.connection.open_uni().await.unwrap();
for _ in 0..num_bytes {
s1.write_all(&[0u8]).await.unwrap();
}
s1.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 5 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets > num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, num_bytes);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
}
| QuicServerError | identifier_name |
quic.rs | use {
crossbeam_channel::Sender,
futures_util::stream::StreamExt,
pem::Pem,
pkcs8::{der::Document, AlgorithmIdentifier, ObjectIdentifier},
quinn::{Endpoint, EndpointConfig, ServerConfig},
rcgen::{CertificateParams, DistinguishedName, DnType, SanType},
solana_perf::packet::PacketBatch,
solana_sdk::{
packet::{Packet, PACKET_DATA_SIZE},
signature::Keypair,
},
std::{
error::Error,
net::{IpAddr, SocketAddr, UdpSocket},
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread,
time::Duration,
},
tokio::{
runtime::{Builder, Runtime},
time::timeout,
},
};
/// Returns default server configuration along with its PEM certificate chain.
#[allow(clippy::field_reassign_with_default)] // https://github.com/rust-lang/rust-clippy/issues/6527
fn configure_server(
identity_keypair: &Keypair,
gossip_host: IpAddr,
) -> Result<(ServerConfig, String), QuicServerError> {
let (cert_chain, priv_key) =
new_cert(identity_keypair, gossip_host).map_err(|_e| QuicServerError::ConfigureFailed)?;
let cert_chain_pem_parts: Vec<Pem> = cert_chain
.iter()
.map(|cert| Pem {
tag: "CERTIFICATE".to_string(),
contents: cert.0.clone(),
})
.collect();
let cert_chain_pem = pem::encode_many(&cert_chain_pem_parts);
let mut server_config = ServerConfig::with_single_cert(cert_chain, priv_key)
.map_err(|_e| QuicServerError::ConfigureFailed)?;
let config = Arc::get_mut(&mut server_config.transport).unwrap();
const MAX_CONCURRENT_UNI_STREAMS: u32 = 1;
config.max_concurrent_uni_streams(MAX_CONCURRENT_UNI_STREAMS.into());
config.stream_receive_window((PACKET_DATA_SIZE as u32).into());
config.receive_window((PACKET_DATA_SIZE as u32 * MAX_CONCURRENT_UNI_STREAMS).into());
// disable bidi & datagrams
const MAX_CONCURRENT_BIDI_STREAMS: u32 = 0;
config.max_concurrent_bidi_streams(MAX_CONCURRENT_BIDI_STREAMS.into());
config.datagram_receive_buffer_size(None);
Ok((server_config, cert_chain_pem))
}
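// Sketch of the flow-control sizing above: with a per-stream window of one
// packet and a single allowed unidirectional stream, the connection-wide
// receive window works out to exactly one PACKET_DATA_SIZE, so a peer can
// never make the server buffer more than one packet per connection.
#[cfg(test)]
mod window_sizing_sketch {
use super::*;
#[test]
fn connection_window_is_one_packet_per_stream() {
const MAX_CONCURRENT_UNI_STREAMS: u32 = 1;
let per_stream = PACKET_DATA_SIZE as u32;
let per_connection = per_stream * MAX_CONCURRENT_UNI_STREAMS;
assert_eq!(per_connection, PACKET_DATA_SIZE as u32);
}
}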
fn new_cert(
identity_keypair: &Keypair,
san: IpAddr,
) -> Result<(Vec<rustls::Certificate>, rustls::PrivateKey), Box<dyn Error>> {
// Generate a self-signed cert from validator identity key
let cert_params = new_cert_params(identity_keypair, san);
let cert = rcgen::Certificate::from_params(cert_params)?;
let cert_der = cert.serialize_der().unwrap();
let priv_key = cert.serialize_private_key_der();
let priv_key = rustls::PrivateKey(priv_key);
let cert_chain = vec![rustls::Certificate(cert_der)];
Ok((cert_chain, priv_key))
}
fn convert_to_rcgen_keypair(identity_keypair: &Keypair) -> rcgen::KeyPair {
// from https://datatracker.ietf.org/doc/html/rfc8410#section-3
const ED25519_IDENTIFIER: [u32; 4] = [1, 3, 101, 112];
let mut private_key = Vec::<u8>::with_capacity(34);
private_key.extend_from_slice(&[0x04, 0x20]); // ASN.1 OCTET STRING
private_key.extend_from_slice(identity_keypair.secret().as_bytes());
let key_pkcs8 = pkcs8::PrivateKeyInfo {
algorithm: AlgorithmIdentifier {
oid: ObjectIdentifier::from_arcs(&ED25519_IDENTIFIER).unwrap(),
parameters: None,
},
private_key: &private_key,
public_key: None,
};
let key_pkcs8_der = key_pkcs8
.to_der()
.expect("Failed to convert keypair to DER")
.to_der();
// Parse private key into rcgen::KeyPair struct.
rcgen::KeyPair::from_der(&key_pkcs8_der).expect("Failed to parse keypair from DER")
}
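// Sketch of the ASN.1 framing built above: 0x04 is the DER OCTET STRING tag
// and 0x20 (= 32) its length, so the buffer is a 2-byte header plus the
// 32-byte Ed25519 seed: 34 bytes in total, matching `with_capacity(34)`.
#[cfg(test)]
mod pkcs8_framing_sketch {
#[test]
fn inner_octet_string_is_header_plus_seed() {
let seed = [0u8; 32];
let mut private_key = Vec::with_capacity(34);
private_key.extend_from_slice(&[0x04, 0x20]);
private_key.extend_from_slice(&seed);
assert_eq!(private_key.len(), 34);
assert_eq!(private_key[1] as usize, seed.len());
}
}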
fn new_cert_params(identity_keypair: &Keypair, san: IpAddr) -> CertificateParams {
// TODO(terorie): Is it safe to sign the TLS cert with the identity private key?
// Unfortunately, rcgen does not accept a "raw" Ed25519 key.
// We have to convert it to DER and pass it to the library.
// Convert private key into PKCS#8 v1 object.
// RFC 8410, Section 7: Private Key Format
// https://datatracker.ietf.org/doc/html/rfc8410#section-7
let keypair = convert_to_rcgen_keypair(identity_keypair);
let mut cert_params = CertificateParams::default();
cert_params.subject_alt_names = vec![SanType::IpAddress(san)];
cert_params.alg = &rcgen::PKCS_ED25519;
cert_params.key_pair = Some(keypair);
cert_params.distinguished_name = DistinguishedName::new();
cert_params
.distinguished_name
.push(DnType::CommonName, "Solana node");
cert_params
}
pub fn rt() -> Runtime {
Builder::new_current_thread().enable_all().build().unwrap()
}
#[derive(thiserror::Error, Debug)]
pub enum QuicServerError {
#[error("Server configure failed")]
ConfigureFailed,
#[error("Endpoint creation failed")]
EndpointFailed,
}
// Return true if the server should drop the stream
fn handle_chunk(
chunk: &Result<Option<quinn::Chunk>, quinn::ReadError>,
maybe_batch: &mut Option<PacketBatch>,
remote_addr: &SocketAddr,
packet_sender: &Sender<PacketBatch>,
) -> bool {
match chunk {
Ok(maybe_chunk) => {
if let Some(chunk) = maybe_chunk {
trace!("got chunk: {:?}", chunk);
let chunk_len = chunk.bytes.len() as u64;
// shouldn't happen, but sanity check the size and offsets
if chunk.offset > PACKET_DATA_SIZE as u64 || chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
if chunk.offset + chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
// chunk looks valid
if maybe_batch.is_none() {
let mut batch = PacketBatch::with_capacity(1);
let mut packet = Packet::default();
packet.meta.set_addr(remote_addr);
batch.packets.push(packet);
*maybe_batch = Some(batch);
}
if let Some(batch) = maybe_batch.as_mut() {
let end = chunk.offset as usize + chunk.bytes.len();
batch.packets[0].data[chunk.offset as usize..end].copy_from_slice(&chunk.bytes);
batch.packets[0].meta.size = std::cmp::max(batch.packets[0].meta.size, end);
}
} else {
trace!("chunk is none");
// done receiving chunks
if let Some(batch) = maybe_batch.take() {
let len = batch.packets[0].meta.size;
if let Err(e) = packet_sender.send(batch) {
info!("send error: {}", e);
} else {
trace!("sent {} byte packet", len);
}
}
return true;
}
}
Err(e) => {
debug!("Received stream error: {:?}", e);
return true;
}
}
false
}
pub fn spawn_server(
sock: UdpSocket,
keypair: &Keypair,
gossip_host: IpAddr,
packet_sender: Sender<PacketBatch>,
exit: Arc<AtomicBool>,
) -> Result<thread::JoinHandle<()>, QuicServerError> {
let (config, _cert) = configure_server(keypair, gossip_host)?;
let runtime = rt();
let (_, mut incoming) = {
let _guard = runtime.enter();
Endpoint::new(EndpointConfig::default(), Some(config), sock)
.map_err(|_e| QuicServerError::EndpointFailed)?
};
let handle = thread::spawn(move || {
let handle = runtime.spawn(async move {
while !exit.load(Ordering::Relaxed) {
const WAIT_FOR_CONNECTION_TIMEOUT_MS: u64 = 1000;
let timeout_connection = timeout(
Duration::from_millis(WAIT_FOR_CONNECTION_TIMEOUT_MS),
incoming.next(),
)
.await;
if let Ok(Some(connection)) = timeout_connection {
if let Ok(new_connection) = connection.await {
let exit = exit.clone();
let quinn::NewConnection {
connection,
mut uni_streams,
..
} = new_connection;
let remote_addr = connection.remote_address();
let packet_sender = packet_sender.clone();
tokio::spawn(async move {
debug!("new connection {}", remote_addr);
while let Some(Ok(mut stream)) = uni_streams.next().await {
let mut maybe_batch = None;
while !exit.load(Ordering::Relaxed) {
if handle_chunk(
&stream.read_chunk(PACKET_DATA_SIZE, false).await,
&mut maybe_batch,
&remote_addr,
&packet_sender,
) {
break;
}
}
}
});
}
}
}
});
if let Err(e) = runtime.block_on(handle) {
warn!("error from runtime.block_on: {:?}", e);
}
});
Ok(handle)
}
#[cfg(test)]
mod test {
use {
super::*,
crossbeam_channel::unbounded,
quinn::{ClientConfig, NewConnection},
std::{net::SocketAddr, time::Instant},
};
struct SkipServerVerification;
impl SkipServerVerification {
fn new() -> Arc<Self> {
Arc::new(Self)
}
}
impl rustls::client::ServerCertVerifier for SkipServerVerification {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_server_name: &rustls::ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: std::time::SystemTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
Ok(rustls::client::ServerCertVerified::assertion())
}
}
pub fn get_client_config() -> quinn::ClientConfig {
let crypto = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(SkipServerVerification::new())
.with_no_client_auth();
ClientConfig::new(Arc::new(crypto))
}
#[test]
fn test_quic_server_exit() {
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, _receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
fn make_client_endpoint(runtime: &Runtime, addr: &SocketAddr) -> NewConnection {
let client_socket = UdpSocket::bind("127.0.0.1:0").unwrap();
let mut endpoint = quinn::Endpoint::new(EndpointConfig::default(), None, client_socket)
.unwrap()
.0;
endpoint.set_default_client_config(get_client_config());
runtime
.block_on(endpoint.connect(*addr, "localhost").unwrap())
.unwrap()
}
#[test]
fn test_quic_server_multiple_streams() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
let conn2 = Arc::new(make_client_endpoint(&runtime, &server_address));
let mut num_expected_packets = 0;
for i in 0..10 {
info!("sending: {}", i);
let c1 = conn1.clone();
let c2 = conn2.clone();
let handle = runtime.spawn(async move {
let mut s1 = c1.connection.open_uni().await.unwrap();
let mut s2 = c2.connection.open_uni().await.unwrap();
s1.write_all(&[0u8]).await.unwrap();
s1.finish().await.unwrap();
s2.write_all(&[0u8]).await.unwrap();
s2.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
num_expected_packets += 2;
thread::sleep(Duration::from_millis(200));
}
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 10 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets == num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, 1);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
#[test]
fn test_quic_server_multiple_writes() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
// Send a full size packet with single byte writes.
let num_bytes = PACKET_DATA_SIZE;
let num_expected_packets = 1;
let handle = runtime.spawn(async move {
let mut s1 = conn1.connection.open_uni().await.unwrap();
for _ in 0..num_bytes {
s1.write_all(&[0u8]).await.unwrap();
}
s1.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 5 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets > num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, num_bytes);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
} | new_cert(identity_keypair, gossip_host).map_err(|_e| QuicServerError::ConfigureFailed)?;
let cert_chain_pem_parts: Vec<Pem> = cert_chain
.iter()
.map(|cert| Pem {
tag: "CERTIFICATE".to_string(), | random_line_split |
quic.rs | use {
crossbeam_channel::Sender,
futures_util::stream::StreamExt,
pem::Pem,
pkcs8::{der::Document, AlgorithmIdentifier, ObjectIdentifier},
quinn::{Endpoint, EndpointConfig, ServerConfig},
rcgen::{CertificateParams, DistinguishedName, DnType, SanType},
solana_perf::packet::PacketBatch,
solana_sdk::{
packet::{Packet, PACKET_DATA_SIZE},
signature::Keypair,
},
std::{
error::Error,
net::{IpAddr, SocketAddr, UdpSocket},
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread,
time::Duration,
},
tokio::{
runtime::{Builder, Runtime},
time::timeout,
},
};
/// Returns default server configuration along with its PEM certificate chain.
#[allow(clippy::field_reassign_with_default)] // https://github.com/rust-lang/rust-clippy/issues/6527
fn configure_server(
identity_keypair: &Keypair,
gossip_host: IpAddr,
) -> Result<(ServerConfig, String), QuicServerError> {
let (cert_chain, priv_key) =
new_cert(identity_keypair, gossip_host).map_err(|_e| QuicServerError::ConfigureFailed)?;
let cert_chain_pem_parts: Vec<Pem> = cert_chain
.iter()
.map(|cert| Pem {
tag: "CERTIFICATE".to_string(),
contents: cert.0.clone(),
})
.collect();
let cert_chain_pem = pem::encode_many(&cert_chain_pem_parts);
let mut server_config = ServerConfig::with_single_cert(cert_chain, priv_key)
.map_err(|_e| QuicServerError::ConfigureFailed)?;
let config = Arc::get_mut(&mut server_config.transport).unwrap();
const MAX_CONCURRENT_UNI_STREAMS: u32 = 1;
config.max_concurrent_uni_streams(MAX_CONCURRENT_UNI_STREAMS.into());
config.stream_receive_window((PACKET_DATA_SIZE as u32).into());
config.receive_window((PACKET_DATA_SIZE as u32 * MAX_CONCURRENT_UNI_STREAMS).into());
// disable bidi & datagrams
const MAX_CONCURRENT_BIDI_STREAMS: u32 = 0;
config.max_concurrent_bidi_streams(MAX_CONCURRENT_BIDI_STREAMS.into());
config.datagram_receive_buffer_size(None);
Ok((server_config, cert_chain_pem))
}
fn new_cert(
identity_keypair: &Keypair,
san: IpAddr,
) -> Result<(Vec<rustls::Certificate>, rustls::PrivateKey), Box<dyn Error>> {
// Generate a self-signed cert from validator identity key
let cert_params = new_cert_params(identity_keypair, san);
let cert = rcgen::Certificate::from_params(cert_params)?;
let cert_der = cert.serialize_der().unwrap();
let priv_key = cert.serialize_private_key_der();
let priv_key = rustls::PrivateKey(priv_key);
let cert_chain = vec![rustls::Certificate(cert_der)];
Ok((cert_chain, priv_key))
}
fn convert_to_rcgen_keypair(identity_keypair: &Keypair) -> rcgen::KeyPair {
// from https://datatracker.ietf.org/doc/html/rfc8410#section-3
const ED25519_IDENTIFIER: [u32; 4] = [1, 3, 101, 112];
let mut private_key = Vec::<u8>::with_capacity(34);
private_key.extend_from_slice(&[0x04, 0x20]); // ASN.1 OCTET STRING
private_key.extend_from_slice(identity_keypair.secret().as_bytes());
let key_pkcs8 = pkcs8::PrivateKeyInfo {
algorithm: AlgorithmIdentifier {
oid: ObjectIdentifier::from_arcs(&ED25519_IDENTIFIER).unwrap(),
parameters: None,
},
private_key: &private_key,
public_key: None,
};
let key_pkcs8_der = key_pkcs8
.to_der()
.expect("Failed to convert keypair to DER")
.to_der();
// Parse private key into rcgen::KeyPair struct.
rcgen::KeyPair::from_der(&key_pkcs8_der).expect("Failed to parse keypair from DER")
}
fn new_cert_params(identity_keypair: &Keypair, san: IpAddr) -> CertificateParams {
// TODO(terorie): Is it safe to sign the TLS cert with the identity private key?
// Unfortunately, rcgen does not accept a "raw" Ed25519 key.
// We have to convert it to DER and pass it to the library.
// Convert private key into PKCS#8 v1 object.
// RFC 8410, Section 7: Private Key Format
// https://datatracker.ietf.org/doc/html/rfc8410#section-7
let keypair = convert_to_rcgen_keypair(identity_keypair);
let mut cert_params = CertificateParams::default();
cert_params.subject_alt_names = vec![SanType::IpAddress(san)];
cert_params.alg = &rcgen::PKCS_ED25519;
cert_params.key_pair = Some(keypair);
cert_params.distinguished_name = DistinguishedName::new();
cert_params
.distinguished_name
.push(DnType::CommonName, "Solana node");
cert_params
}
pub fn rt() -> Runtime {
Builder::new_current_thread().enable_all().build().unwrap()
}
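// Sketch (assumption about quinn's internals): `Endpoint::new` spawns its
// driver task with `tokio::spawn`, which panics unless a runtime context is
// current, hence the `runtime.enter()` guard in `spawn_server` before the
// endpoint is created. The helper below restates that pattern generically.
#[allow(dead_code)]
fn with_runtime_entered<T>(runtime: &Runtime, f: impl FnOnce() -> T) -> T {
let _guard = runtime.enter(); // make `runtime` current on this thread
f()
}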
#[derive(thiserror::Error, Debug)]
pub enum QuicServerError {
#[error("Server configure failed")]
ConfigureFailed,
#[error("Endpoint creation failed")]
EndpointFailed,
}
// Return true if the server should drop the stream
fn handle_chunk(
chunk: &Result<Option<quinn::Chunk>, quinn::ReadError>,
maybe_batch: &mut Option<PacketBatch>,
remote_addr: &SocketAddr,
packet_sender: &Sender<PacketBatch>,
) -> bool {
match chunk {
Ok(maybe_chunk) => {
if let Some(chunk) = maybe_chunk {
trace!("got chunk: {:?}", chunk);
let chunk_len = chunk.bytes.len() as u64;
// shouldn't happen, but sanity check the size and offsets
if chunk.offset > PACKET_DATA_SIZE as u64 || chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
if chunk.offset + chunk_len > PACKET_DATA_SIZE as u64 {
return true;
}
// chunk looks valid
if maybe_batch.is_none() {
let mut batch = PacketBatch::with_capacity(1);
let mut packet = Packet::default();
packet.meta.set_addr(remote_addr);
batch.packets.push(packet);
*maybe_batch = Some(batch);
}
if let Some(batch) = maybe_batch.as_mut() {
let end = chunk.offset as usize + chunk.bytes.len();
batch.packets[0].data[chunk.offset as usize..end].copy_from_slice(&chunk.bytes);
batch.packets[0].meta.size = std::cmp::max(batch.packets[0].meta.size, end);
}
} else {
trace!("chunk is none");
// done receiving chunks
if let Some(batch) = maybe_batch.take() {
let len = batch.packets[0].meta.size;
if let Err(e) = packet_sender.send(batch) {
info!("send error: {}", e);
} else {
trace!("sent {} byte packet", len);
}
}
return true;
}
}
Err(e) => {
debug!("Received stream error: {:?}", e);
return true;
}
}
false
}
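// The three early returns above, restated as one pure predicate over
// (offset, len) so the bounds logic can be unit-tested without a QUIC stream.
// Sketch only; these names are not from the original file.
#[cfg(test)]
mod chunk_bounds_sketch {
use super::*;
fn chunk_in_bounds(offset: u64, len: u64) -> bool {
let max = PACKET_DATA_SIZE as u64;
offset <= max && len <= max && offset + len <= max
}
#[test]
fn rejects_chunks_past_the_packet_end() {
let max = PACKET_DATA_SIZE as u64;
assert!(chunk_in_bounds(0, max));
assert!(!chunk_in_bounds(1, max));
assert!(!chunk_in_bounds(max + 1, 0));
}
}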
pub fn spawn_server(
sock: UdpSocket,
keypair: &Keypair,
gossip_host: IpAddr,
packet_sender: Sender<PacketBatch>,
exit: Arc<AtomicBool>,
) -> Result<thread::JoinHandle<()>, QuicServerError> {
let (config, _cert) = configure_server(keypair, gossip_host)?;
let runtime = rt();
let (_, mut incoming) = {
let _guard = runtime.enter();
Endpoint::new(EndpointConfig::default(), Some(config), sock)
.map_err(|_e| QuicServerError::EndpointFailed)?
};
let handle = thread::spawn(move || {
let handle = runtime.spawn(async move {
while !exit.load(Ordering::Relaxed) {
const WAIT_FOR_CONNECTION_TIMEOUT_MS: u64 = 1000;
let timeout_connection = timeout(
Duration::from_millis(WAIT_FOR_CONNECTION_TIMEOUT_MS),
incoming.next(),
)
.await;
if let Ok(Some(connection)) = timeout_connection {
if let Ok(new_connection) = connection.await {
let exit = exit.clone();
let quinn::NewConnection {
connection,
mut uni_streams,
..
} = new_connection;
let remote_addr = connection.remote_address();
let packet_sender = packet_sender.clone();
tokio::spawn(async move {
debug!("new connection {}", remote_addr);
while let Some(Ok(mut stream)) = uni_streams.next().await {
let mut maybe_batch = None;
while !exit.load(Ordering::Relaxed) {
if handle_chunk(
&stream.read_chunk(PACKET_DATA_SIZE, false).await,
&mut maybe_batch,
&remote_addr,
&packet_sender,
) {
break;
}
}
}
});
}
}
}
});
if let Err(e) = runtime.block_on(handle) {
warn!("error from runtime.block_on: {:?}", e);
}
});
Ok(handle)
}
#[cfg(test)]
mod test {
use {
super::*,
crossbeam_channel::unbounded,
quinn::{ClientConfig, NewConnection},
std::{net::SocketAddr, time::Instant},
};
struct SkipServerVerification;
impl SkipServerVerification {
fn new() -> Arc<Self> {
Arc::new(Self)
}
}
impl rustls::client::ServerCertVerifier for SkipServerVerification {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_server_name: &rustls::ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: std::time::SystemTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
Ok(rustls::client::ServerCertVerified::assertion())
}
}
pub fn get_client_config() -> quinn::ClientConfig {
let crypto = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(SkipServerVerification::new())
.with_no_client_auth();
ClientConfig::new(Arc::new(crypto))
}
#[test]
fn test_quic_server_exit() {
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, _receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
fn make_client_endpoint(runtime: &Runtime, addr: &SocketAddr) -> NewConnection {
let client_socket = UdpSocket::bind("127.0.0.1:0").unwrap();
let mut endpoint = quinn::Endpoint::new(EndpointConfig::default(), None, client_socket)
.unwrap()
.0;
endpoint.set_default_client_config(get_client_config());
runtime
.block_on(endpoint.connect(*addr, "localhost").unwrap())
.unwrap()
}
#[test]
fn test_quic_server_multiple_streams() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
let conn2 = Arc::new(make_client_endpoint(&runtime, &server_address));
let mut num_expected_packets = 0;
for i in 0..10 {
info!("sending: {}", i);
let c1 = conn1.clone();
let c2 = conn2.clone();
let handle = runtime.spawn(async move {
let mut s1 = c1.connection.open_uni().await.unwrap();
let mut s2 = c2.connection.open_uni().await.unwrap();
s1.write_all(&[0u8]).await.unwrap();
s1.finish().await.unwrap();
s2.write_all(&[0u8]).await.unwrap();
s2.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
num_expected_packets += 2;
thread::sleep(Duration::from_millis(200));
}
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 10 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets == num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, 1);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
#[test]
fn test_quic_server_multiple_writes() {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
// Send a full size packet with single byte writes.
let num_bytes = PACKET_DATA_SIZE;
let num_expected_packets = 1;
let handle = runtime.spawn(async move {
let mut s1 = conn1.connection.open_uni().await.unwrap();
for _ in 0..num_bytes {
s1.write_all(&[0u8]).await.unwrap();
}
s1.finish().await.unwrap();
});
runtime.block_on(handle).unwrap();
let mut all_packets = vec![];
let now = Instant::now();
let mut total_packets = 0;
while now.elapsed().as_secs() < 5 {
if let Ok(packets) = receiver.recv_timeout(Duration::from_secs(1)) {
total_packets += packets.packets.len();
all_packets.push(packets)
}
if total_packets > num_expected_packets {
break;
}
}
for batch in all_packets {
for p in &batch.packets {
assert_eq!(p.meta.size, num_bytes);
}
}
assert_eq!(total_packets, num_expected_packets);
exit.store(true, Ordering::Relaxed);
t.join().unwrap();
}
}
| {
solana_logger::setup();
let s = UdpSocket::bind("127.0.0.1:0").unwrap();
let exit = Arc::new(AtomicBool::new(false));
let (sender, receiver) = unbounded();
let keypair = Keypair::new();
let ip = "127.0.0.1".parse().unwrap();
let server_address = s.local_addr().unwrap();
let t = spawn_server(s, &keypair, ip, sender, exit.clone()).unwrap();
let runtime = rt();
let _rt_guard = runtime.enter();
let conn1 = Arc::new(make_client_endpoint(&runtime, &server_address));
let conn2 = Arc::new(make_client_endpoint(&runtime, &server_address));
let mut num_expected_packets = 0;
for i in 0..10 {
info!("sending: {}", i);
let c1 = conn1.clone();
let c2 = conn2.clone();
let handle = runtime.spawn(async move { | identifier_body |
render.rs | //! Definitions, constructors, and management for the EnsoGL shapes that are used to draw an edge.
//!
//! The core function of this module is to translate edge layouts into the shape parameters that
//! will implement them.
use crate::prelude::*;
use ensogl::display::shape::*;
use crate::GraphLayers;
use super::layout::Corner;
use super::layout::EdgeSplit;
use super::layout::Oriented;
use super::layout::SplitArc;
use super::layout::TargetAttachment;
use ensogl::data::color;
use ensogl::display;
use ensogl::display::scene::Scene;
use std::f32::consts::FRAC_PI_2;
use std::f32::consts::PI;
use std::f32::consts::TAU;
// =================
// === Constants ===
// =================
const LINE_WIDTH: f32 = 4.0;
const HOVER_EXTENSION: f32 = 10.0;
pub(super) const HOVER_WIDTH: f32 = LINE_WIDTH + HOVER_EXTENSION;
mod arrow {
use super::*;
pub(super) const SIZE: Vector2 = Vector2(18.75, 18.75);
}
mod attachment {
/// Extra length to add to the top and bottom of the target-attachment bit, to ensure that it
/// appears to pass through the top of the node. Without this adjustment, inexact
/// floating-point math and anti-aliasing would cause a 1-pixel gap artifact right where
/// the attachment should meet the corner at the edge of the node.
pub(super) const LENGTH_ADJUSTMENT: f32 = 0.1;
}
// ===================
// === Edge Shapes ===
// ===================
/// The shapes used to render an edge.
#[derive(Debug, Default)]
pub(super) struct Shapes {
/// The individual [`Corner`]s making up the edge. Each is drawn in the focused or unfocused
/// color.
sections: RefCell<Vec<Rectangle>>,
/// A pair of [`arc`] shapes used when the mouse is over the rounded corner, and the edge must
/// be split into focused and unfocused sides at a certain angle along the arc.
split_arc: RefCell<Option<[arc::View; 2]>>,
/// Wider versions of the [`sections`], for receiving mouse events.
hover_sections: RefCell<Vec<Rectangle>>,
/// The end of the edge that is drawn on top of the node and connects to the target node's
/// input port.
target_attachment: RefCell<Option<Rectangle>>,
/// Arrow drawn on long backward edges to indicate data flow direction.
dataflow_arrow: RefCell<Option<Rectangle>>,
/// A rectangle representing the source node shape when the edge is in detached state. Used
/// to mask out the edge fragment that would otherwise be drawn over the source node.
source_cutout: RefCell<Option<Rectangle>>,
}
impl Shapes {
/// Redraw the arrow used to mark long backward edges.
pub(super) fn redraw_dataflow_arrow(
&self,
parent: &impl ShapeParent,
parameters: RedrawDataflowArrow,
) {
let RedrawDataflowArrow { arrow, source_color, target_color, focus_split, is_attached } =
parameters;
let shape = self.dataflow_arrow.take();
if let Some(arrow_center) = arrow {
// The arrow will have the same color as the target-end of the first corner from the
// source (this is the `arrow_center` point).
let color = match focus_split.map(|split| split.corner_index) {
Some(0) => target_color,
_ => source_color,
};
let shape = shape.unwrap_or_else(|| parent.new_dataflow_arrow());
shape.set_xy(arrow_center - arrow::SIZE / 2.0);
shape.set_color(color);
Self::set_layer(parent, &shape, is_attached, false);
self.dataflow_arrow.replace(Some(shape));
}
}
/// Redraw the invisible mouse-event-catching edges.
pub(super) fn redraw_hover_sections(
&self,
parent: &impl ShapeParent,
corners: &[Oriented<Corner>],
) {
let hover_factory = self
.hover_sections
.take()
.into_iter()
.chain(iter::repeat_with(|| parent.new_hover_section()));
*self.hover_sections.borrow_mut() = corners
.iter()
.zip(hover_factory)
.map(|(corner, shape)| draw_corner(shape, **corner, INVISIBLE_HOVER_COLOR, HOVER_WIDTH))
.collect();
}
/// Redraw the sections, each of which is a [`Rectangle`] implementing a [`Corner`], or multiple
/// [`Rectangle`]s and multiple [`arc::View`]s, if it is a split [`Corner`].
pub(super) fn redraw_sections(&self, parent: &impl ShapeParent, parameters: RedrawSections) {
let RedrawSections { corners, source_color, target_color, focus_split, is_attached } =
parameters;
let corner_index =
focus_split.map(|split| split.corner_index).unwrap_or_else(|| corners.len());
let split_corner = focus_split.map(|split| split.split_corner);
let mut section_factory =
self.sections.take().into_iter().chain(iter::repeat_with(|| parent.new_section()));
let mut new_sections = self.redraw_complete_sections(
&mut section_factory,
corners,
corner_index,
source_color,
target_color,
);
let arc_shapes = self.split_arc.take();
if let Some(split_corner) = split_corner {
if let Some(split_arc) = split_corner.split_arc {
let arc_shapes = arc_shapes.unwrap_or_else(|| [parent.new_arc(), parent.new_arc()]);
let arc_shapes = draw_split_arc(arc_shapes, split_arc);
arc_shapes[0].color.set(source_color.into());
arc_shapes[1].color.set(target_color.into());
self.split_arc.replace(Some(arc_shapes));
}
let (source_shape, target_shape) =
(section_factory.next().unwrap(), section_factory.next().unwrap());
new_sections.extend([
draw_corner(source_shape, *split_corner.source_end, source_color, LINE_WIDTH),
draw_corner(target_shape, *split_corner.target_end, target_color, LINE_WIDTH),
]);
}
for (i, shape) in new_sections.iter().enumerate() {
Self::set_layer(parent, shape, is_attached, i == 0);
}
*self.sections.borrow_mut() = new_sections;
}
pub(crate) fn redraw_cutout(
&self,
parent: &impl ShapeParent,
is_attached: bool,
source_size: Vector2,
) {
let cutout = self.source_cutout.take();
if !is_attached {
let cutout = cutout.unwrap_or_else(|| parent.new_cutout());
cutout.set_xy(-source_size / 2.0);
cutout.set_size(source_size);
self.source_cutout.replace(Some(cutout));
}
}
/// Redraw the sections that aren't split by the focus position.
pub(super) fn redraw_complete_sections(
&self,
section_factory: impl Iterator<Item = Rectangle>,
corners: &[Oriented<Corner>],
corner_index: usize,
source_color: color::Rgba,
target_color: color::Rgba,
) -> Vec<Rectangle> {
corners
.iter()
.enumerate()
.filter_map(|(i, corner)| {
if i == corner_index {
None
} else {
let color = match i < corner_index {
true => source_color,
false => target_color,
};
Some((color, corner))
}
})
.zip(section_factory)
.map(|((color, corner), shape)| draw_corner(shape, **corner, color, LINE_WIDTH))
.collect()
}
/// Redraw the little bit that goes on top of the target node.
pub(super) fn redraw_target_attachment(
&self,
parent: &impl ShapeParent,
target_attachment: Option<TargetAttachment>,
color: color::Rgba,
) {
let shape = self.target_attachment.take();
if let Some(TargetAttachment { target, length }) = target_attachment
&& length > f32::EPSILON {
let shape = shape.unwrap_or_else(|| parent.new_target_attachment());
shape.set_size_y(length + attachment::LENGTH_ADJUSTMENT * 2.0);
let offset = Vector2(-LINE_WIDTH / 2.0, - length - attachment::LENGTH_ADJUSTMENT);
shape.set_xy(target + offset);
shape.set_color(color);
self.target_attachment.replace(Some(shape));
}
}
/// Add the given shape to the appropriate layer depending on whether it is attached.
fn set_layer(
parent: &impl ShapeParent,
shape: &Rectangle,
below_nodes: bool,
near_source: bool,
) {
let layers = parent.layers();
let layer = if below_nodes {
&layers.edge_below_nodes
} else if near_source {
&layers.masked_edge_above_nodes
} else {
&layers.edge_above_nodes
};
layer.add(shape);
}
}
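// Sketch of the partition in `redraw_complete_sections`: index `corner_index`
// itself is the split corner (drawn separately); corners before it take the
// source color and corners after it the target color. Illustrative helper,
// not part of the original file.
#[allow(dead_code)]
fn section_color(
i: usize,
corner_index: usize,
source_color: color::Rgba,
target_color: color::Rgba,
) -> color::Rgba {
if i < corner_index {
source_color
} else {
target_color
}
}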
// === Redraw parameters ====
/// Arguments passed to [`Shapes::redraw_sections`].
pub(super) struct RedrawSections<'a> {
/// The corners to be redrawn.
pub(super) corners: &'a [Oriented<Corner>],
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
/// Arguments passed to [`Shapes::redraw_dataflow_arrow`].
pub(super) struct RedrawDataflowArrow {
/// The center of the arrow, if the arrow should be drawn.
pub(super) arrow: Option<Vector2>,
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
// =========================
// === Shape Definitions ===
// =========================
/// An arc around the origin. `outer_radius` determines the distance from the origin to the outer
/// edge of the arc, `stroke_width` the width of the arc. The arc starts at `start_angle`, relative
/// to the origin. Its radial size is `sector_angle`. The ends are flat, not rounded as in
/// [`RoundedArc`].
mod arc {
use super::*;
ensogl::shape! {
pointer_events = false;
(
style: Style,
color: Vector4,
outer_radius: f32,
stroke_width: f32,
start_angle: f32,
sector_angle: f32,
) {
let circle = Circle(outer_radius.px()) - Circle((outer_radius - stroke_width).px());
let angle_adjust = Var::<f32>::from(FRAC_PI_2);
let rotate_angle = -start_angle + angle_adjust - §or_angle / 2.0;
let angle = PlaneAngleFast(sector_angle).rotate(rotate_angle);
let angle = angle.grow(0.5.px());
let shape = circle * angle;
let shape = shape.fill(color);
shape.into()
}
}
}
// ======================
// === Shape Creation ===
// ======================
pub(super) trait ShapeParent: display::Object {
fn scene(&self) -> &Scene;
fn layers(&self) -> &GraphLayers;
/// Create a shape object to render one of the [`Corner`]s making up the edge.
fn new_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(LINE_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
new
}
/// Create a shape object to render the invisible hover area corresponding to one of the
/// [`Corner`]s making up the edge.
fn new_hover_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(HOVER_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
self.display_object().add_child(&new);
self.layers().edge_below_nodes.add(&new);
new
}
/// Create a shape object to render an arbitrary-angle arc. This is used when the focus is split
/// in the rounded part of a [`Corner`].
fn new_arc(&self) -> arc::View {
let arc = arc::View::new();
arc.stroke_width.set(LINE_WIDTH);
self.display_object().add_child(&arc);
self.layers().edge_below_nodes.add(&arc);
arc
}
/// Create a shape object to render the little bit at the target end of the edge, that draws on
/// top of the node.
fn new_target_attachment(&self) -> Rectangle {
let new = Rectangle::new();
new.set_size_x(LINE_WIDTH);
new.set_border_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
self.layers().edge_above_nodes.add(&new);
new
}
/// Create a shape object to render the arrow that is drawn on long backward edges to show the
/// direction of data flow.
fn new_dataflow_arrow(&self) -> Rectangle {
let new = SimpleTriangle::from_size(arrow::SIZE);
new.set_pointer_events(false);
self.display_object().add_child(&new);
new.into()
}
/// Create a shape object to render the cutout mask for the edge nearby the source node.
fn new_cutout(&self) -> Rectangle {
let cutout = Rectangle::new();
self.display_object().add_child(&cutout);
// FIXME (temporary assumption): Currently we assume that the node background is a rectangle
// with always rounded corners. Ideally we would somehow use actual source node's background
// shape for this.
cutout.set_corner_radius(crate::component::node::CORNER_RADIUS);
self.layers().edge_above_nodes_cutout.add(&cutout);
// Pointer events must be enabled, so that the hover area is masked out as well.
cutout.set_pointer_events(true);
cutout
}
}
// =========================
// === Rendering Corners ===
// =========================
/// Set the given [`Rectangle`]'s geometry to draw this corner shape.
///
/// Note that the shape's `inset` and `border` should be the same value as the provided
/// [`line_width`]. They are not set here as an optimization: When shapes are reused, the value does
/// not need to be set again, reducing needed GPU uploads.
pub(super) fn draw_corner(
shape: Rectangle,
corner: Corner,
color: color::Rgba,
line_width: f32,
) -> Rectangle {
shape.set_xy(corner.origin(line_width));
shape.set_size(corner.size(line_width));
shape.set_clip(corner.clip());
shape.set_corner_radius(corner.radius(line_width));
shape.set_border_color(color);
shape
}
// ==============================
// === Rendering Partial Arcs ===
// ==============================
/// Apply the specified arc-splitting parameters to the given arc shapes.
pub(super) fn draw_split_arc(arc_shapes: [arc::View; 2], split_arc: SplitArc) -> [arc::View; 2] {
let outer_radius = split_arc.radius + LINE_WIDTH / 2.0;
let arc_box = Vector2(outer_radius * 2.0, outer_radius * 2.0);
let arc_offset = Vector2(-outer_radius, -outer_radius);
let geometry = ArcGeometry::bisection(
split_arc.source_end_angle,
split_arc.split_angle,
split_arc.target_end_angle,
);
for (shape, geometry) in arc_shapes.iter().zip(&geometry) {
shape.set_xy(split_arc.origin + arc_offset);
shape.set_size(arc_box);
shape.outer_radius.set(outer_radius);
shape.start_angle.set(geometry.start);
shape.sector_angle.set(geometry.sector);
}
arc_shapes
}
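// Sketch (assumption): `SplitArc::radius` is the centerline radius of the
// stroked edge, so the visible ring spans half the line width on each side
// of it, which is why `draw_split_arc` adds LINE_WIDTH / 2.0 to obtain
// `outer_radius`. Illustrative helper, not part of the original file.
#[allow(dead_code)]
fn annulus_radii(centerline: f32) -> (f32, f32) {
(centerline - LINE_WIDTH / 2.0, centerline + LINE_WIDTH / 2.0)
}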
// === Arc geometry ===
#[derive(Debug, Copy, Clone, PartialEq)]
struct ArcGeometry {
start: f32,
sector: f32,
}
impl ArcGeometry {
fn bisection(a: f32, b: f32, c: f32) -> [Self; 2] {
[Self::new_minor(a, b), Self::new_minor(b, c)]
}
fn new_minor(a: f32, b: f32) -> Self {
let start = minor_arc_start(a, b);
let sector = minor_arc_sector(a, b);
Self { start, sector }
}
}
fn minor_arc_start(a: f32, b: f32) -> f32 {
let a = a.rem_euclid(TAU);
let b = b.rem_euclid(TAU);
let wrapped = (a - b).abs() >= PI;
if wrapped {
if a < f32::EPSILON {
b
} else {
a
}
} else {
min(a, b)
}
}
fn minor_arc_sector(a: f32, b: f32) -> f32 {
let a = a.abs();
let b = b.abs();
let ab = (a - b).abs();
min(ab, TAU - ab)
} | arc.stroke_width.set(LINE_WIDTH);
self.display_object().add_child(&arc);
self.layers().edge_below_nodes.add(&arc);
arc
} | random_line_split |
render.rs | //! Definitions, constructors, and management for the EnsoGL shapes that are used to draw an edge.
//!
//! The core function of this module is to translate edge layouts into the shape parameters that
//! will implement them.
use crate::prelude::*;
use ensogl::display::shape::*;
use crate::GraphLayers;
use super::layout::Corner;
use super::layout::EdgeSplit;
use super::layout::Oriented;
use super::layout::SplitArc;
use super::layout::TargetAttachment;
use ensogl::data::color;
use ensogl::display;
use ensogl::display::scene::Scene;
use std::f32::consts::FRAC_PI_2;
use std::f32::consts::PI;
use std::f32::consts::TAU;
// =================
// === Constants ===
// =================
const LINE_WIDTH: f32 = 4.0;
const HOVER_EXTENSION: f32 = 10.0;
pub(super) const HOVER_WIDTH: f32 = LINE_WIDTH + HOVER_EXTENSION;
mod arrow {
use super::*;
pub(super) const SIZE: Vector2 = Vector2(18.75, 18.75);
}
mod attachment {
/// Extra length to add to the top and bottom of the target-attachment bit, to ensure that it
/// appears to pass through the top of the node. Without this adjustment, inexact
/// floating-point math and anti-aliasing would cause a 1-pixel gap artifact right where
/// the attachment should meet the corner at the edge of the node.
pub(super) const LENGTH_ADJUSTMENT: f32 = 0.1;
}
// ===================
// === Edge Shapes ===
// ===================
/// The shapes used to render an edge.
#[derive(Debug, Default)]
pub(super) struct Shapes {
/// The individual [`Corner`]s making up the edge. Each is drawn in the focused or unfocused
/// color.
sections: RefCell<Vec<Rectangle>>,
/// A pair of [`arc`] shapes used when the mouse is over the rounded corner, and the edge must
/// be split into focused and unfocused sides at a certain angle along the arc.
split_arc: RefCell<Option<[arc::View; 2]>>,
/// Wider versions of the [`sections`], for receiving mouse events.
hover_sections: RefCell<Vec<Rectangle>>,
/// The end of the edge that is drawn on top of the node and connects to the target node's
/// input port.
target_attachment: RefCell<Option<Rectangle>>,
/// Arrow drawn on long backward edges to indicate data flow direction.
dataflow_arrow: RefCell<Option<Rectangle>>,
/// A rectangle representing the source node shape when the edge is in detached state. Used
/// to mask out the edge fragment that would otherwise be drawn over the source node.
source_cutout: RefCell<Option<Rectangle>>,
}
impl Shapes {
/// Redraw the arrow used to mark long backward edges.
pub(super) fn redraw_dataflow_arrow(
&self,
parent: &impl ShapeParent,
parameters: RedrawDataflowArrow,
) {
let RedrawDataflowArrow { arrow, source_color, target_color, focus_split, is_attached } =
parameters;
let shape = self.dataflow_arrow.take();
if let Some(arrow_center) = arrow {
// The arrow will have the same color as the target-end of the first corner from the
// source (this is the `arrow_center` point).
let color = match focus_split.map(|split| split.corner_index) {
Some(0) => target_color,
_ => source_color,
};
let shape = shape.unwrap_or_else(|| parent.new_dataflow_arrow());
shape.set_xy(arrow_center - arrow::SIZE / 2.0);
shape.set_color(color);
Self::set_layer(parent, &shape, is_attached, false);
self.dataflow_arrow.replace(Some(shape));
}
}
/// Redraw the invisible mouse-event-catching edges.
pub(super) fn redraw_hover_sections(
&self,
parent: &impl ShapeParent,
corners: &[Oriented<Corner>],
) {
let hover_factory = self
.hover_sections
.take()
.into_iter()
.chain(iter::repeat_with(|| parent.new_hover_section()));
*self.hover_sections.borrow_mut() = corners
.iter()
.zip(hover_factory)
.map(|(corner, shape)| draw_corner(shape, **corner, INVISIBLE_HOVER_COLOR, HOVER_WIDTH))
.collect();
}
/// Redraw the sections, each of which is a [`Rectangle`] implementing a [`Corner`], or multiple
/// [`Rectangle`]s and multiple [`arc::View`]s, if it is a split [`Corner`].
pub(super) fn redraw_sections(&self, parent: &impl ShapeParent, parameters: RedrawSections) {
let RedrawSections { corners, source_color, target_color, focus_split, is_attached } =
parameters;
let corner_index =
focus_split.map(|split| split.corner_index).unwrap_or_else(|| corners.len());
let split_corner = focus_split.map(|split| split.split_corner);
let mut section_factory =
self.sections.take().into_iter().chain(iter::repeat_with(|| parent.new_section()));
let mut new_sections = self.redraw_complete_sections(
&mut section_factory,
corners,
corner_index,
source_color,
target_color,
);
let arc_shapes = self.split_arc.take();
if let Some(split_corner) = split_corner {
if let Some(split_arc) = split_corner.split_arc {
let arc_shapes = arc_shapes.unwrap_or_else(|| [parent.new_arc(), parent.new_arc()]);
let arc_shapes = draw_split_arc(arc_shapes, split_arc);
arc_shapes[0].color.set(source_color.into());
arc_shapes[1].color.set(target_color.into());
self.split_arc.replace(Some(arc_shapes));
}
let (source_shape, target_shape) =
(section_factory.next().unwrap(), section_factory.next().unwrap());
new_sections.extend([
draw_corner(source_shape, *split_corner.source_end, source_color, LINE_WIDTH),
draw_corner(target_shape, *split_corner.target_end, target_color, LINE_WIDTH),
]);
}
for (i, shape) in new_sections.iter().enumerate() {
Self::set_layer(parent, shape, is_attached, i == 0);
}
*self.sections.borrow_mut() = new_sections;
}
pub(crate) fn redraw_cutout(
&self,
parent: &impl ShapeParent,
is_attached: bool,
source_size: Vector2,
) {
let cutout = self.source_cutout.take();
if !is_attached {
let cutout = cutout.unwrap_or_else(|| parent.new_cutout());
cutout.set_xy(-source_size / 2.0);
cutout.set_size(source_size);
self.source_cutout.replace(Some(cutout));
}
}
/// Redraw the sections that aren't split by the focus position.
pub(super) fn redraw_complete_sections(
&self,
section_factory: impl Iterator<Item = Rectangle>,
corners: &[Oriented<Corner>],
corner_index: usize,
source_color: color::Rgba,
target_color: color::Rgba,
) -> Vec<Rectangle> {
corners
.iter()
.enumerate()
.filter_map(|(i, corner)| {
if i == corner_index {
None
} else {
let color = match i < corner_index {
true => source_color,
false => target_color,
};
Some((color, corner))
}
})
.zip(section_factory)
.map(|((color, corner), shape)| draw_corner(shape, **corner, color, LINE_WIDTH))
.collect()
}
/// Redraw the little bit that goes on top of the target node.
pub(super) fn redraw_target_attachment(
&self,
parent: &impl ShapeParent,
target_attachment: Option<TargetAttachment>,
color: color::Rgba,
) {
let shape = self.target_attachment.take();
if let Some(TargetAttachment { target, length }) = target_attachment
&& length > f32::EPSILON {
let shape = shape.unwrap_or_else(|| parent.new_target_attachment());
shape.set_size_y(length + attachment::LENGTH_ADJUSTMENT * 2.0);
let offset = Vector2(-LINE_WIDTH / 2.0, - length - attachment::LENGTH_ADJUSTMENT);
shape.set_xy(target + offset);
shape.set_color(color);
self.target_attachment.replace(Some(shape));
}
}
/// Add the given shape to the appropriate layer depending on whether it is attached.
fn set_layer(
parent: &impl ShapeParent,
shape: &Rectangle,
below_nodes: bool,
near_source: bool,
) {
let layers = parent.layers();
let layer = if below_nodes {
&layers.edge_below_nodes
} else if near_source {
&layers.masked_edge_above_nodes
} else {
&layers.edge_above_nodes
};
layer.add(shape);
}
}
// === Redraw parameters ====
/// Arguments passed to [`Shapes::redraw_sections`].
pub(super) struct RedrawSections<'a> {
/// The corners to be redrawn.
pub(super) corners: &'a [Oriented<Corner>],
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
/// Arguments passed to [`Shapes::redraw_dataflow_arrow`].
pub(super) struct RedrawDataflowArrow {
/// The center of the arrow, if the arrow should be drawn.
pub(super) arrow: Option<Vector2>,
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
// =========================
// === Shape Definitions ===
// =========================
/// An arc around the origin. `outer_radius` determines the distance from the origin to the outer
/// edge of the arc, `stroke_width` the width of the arc. The arc starts at `start_angle`, relative
/// to the origin. Its radial size is `sector_angle`. The ends are flat, not rounded as in
/// [`RoundedArc`].
mod arc {
use super::*;
ensogl::shape! {
pointer_events = false;
(
style: Style,
color: Vector4,
outer_radius: f32,
stroke_width: f32,
start_angle: f32,
sector_angle: f32,
) {
let circle = Circle(outer_radius.px()) - Circle((outer_radius - stroke_width).px());
let angle_adjust = Var::<f32>::from(FRAC_PI_2);
let rotate_angle = -start_angle + angle_adjust - §or_angle / 2.0;
let angle = PlaneAngleFast(sector_angle).rotate(rotate_angle);
let angle = angle.grow(0.5.px());
let shape = circle * angle;
let shape = shape.fill(color);
shape.into()
}
}
}
// ======================
// === Shape Creation ===
// ======================
pub(super) trait ShapeParent: display::Object {
fn scene(&self) -> &Scene;
fn layers(&self) -> &GraphLayers;
/// Create a shape object to render one of the [`Corner`]s making up the edge.
fn new_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(LINE_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
new
}
/// Create a shape object to render the invisible hover area corresponding to one of the
/// [`Corner`]s making up the edge.
fn new_hover_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(HOVER_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
self.display_object().add_child(&new);
self.layers().edge_below_nodes.add(&new);
new
}
/// Create a shape object to render an arbitrary-angle arc. This is used when the focus is split
/// in the rounded part of a [`Corner`].
fn new_arc(&self) -> arc::View {
let arc = arc::View::new();
arc.stroke_width.set(LINE_WIDTH);
self.display_object().add_child(&arc);
self.layers().edge_below_nodes.add(&arc);
arc
}
/// Create a shape object to render the little bit at the target end of the edge, that draws on
/// top of the node.
fn new_target_attachment(&self) -> Rectangle {
let new = Rectangle::new();
new.set_size_x(LINE_WIDTH);
new.set_border_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
self.layers().edge_above_nodes.add(&new);
new
}
/// Create a shape object to render the arrow that is drawn on long backward edges to show the
/// direction of data flow.
fn new_dataflow_arrow(&self) -> Rectangle {
let new = SimpleTriangle::from_size(arrow::SIZE);
new.set_pointer_events(false);
self.display_object().add_child(&new);
new.into()
}
/// Create a shape object to render the cutout mask for the edge nearby the source node.
fn new_cutout(&self) -> Rectangle {
let cutout = Rectangle::new();
self.display_object().add_child(&cutout);
// FIXME (temporary assumption): Currently we assume that the node background is a rectangle
// with always rounded corners. Ideally we would somehow use actual source node's background
// shape for this.
cutout.set_corner_radius(crate::component::node::CORNER_RADIUS);
self.layers().edge_above_nodes_cutout.add(&cutout);
// Pointer events must be enabled, so that the hover area is masked out as well.
cutout.set_pointer_events(true);
cutout
}
}
// =========================
// === Rendering Corners ===
// =========================
/// Set the given [`Rectangle`]'s geometry to draw this corner shape.
///
/// Note that the shape's `inset` and `border` should be the same value as the provided
/// [`line_width`]. They are not set here as an optimization: When shapes are reused, the value does
/// not need to be set again, reducing needed GPU uploads.
pub(super) fn draw_corner(
shape: Rectangle,
corner: Corner,
color: color::Rgba,
line_width: f32,
) -> Rectangle {
shape.set_xy(corner.origin(line_width));
shape.set_size(corner.size(line_width));
shape.set_clip(corner.clip());
shape.set_corner_radius(corner.radius(line_width));
shape.set_border_color(color);
shape
}
// ==============================
// === Rendering Partial Arcs ===
// ==============================
/// Apply the specified arc-splitting parameters to the given arc shapes.
pub(super) fn draw_split_arc(arc_shapes: [arc::View; 2], split_arc: SplitArc) -> [arc::View; 2] {
let outer_radius = split_arc.radius + LINE_WIDTH / 2.0;
let arc_box = Vector2(outer_radius * 2.0, outer_radius * 2.0);
let arc_offset = Vector2(-outer_radius, -outer_radius);
let geometry = ArcGeometry::bisection(
split_arc.source_end_angle,
split_arc.split_angle,
split_arc.target_end_angle,
);
for (shape, geometry) in arc_shapes.iter().zip(&geometry) {
shape.set_xy(split_arc.origin + arc_offset);
shape.set_size(arc_box);
shape.outer_radius.set(outer_radius);
shape.start_angle.set(geometry.start);
shape.sector_angle.set(geometry.sector);
}
arc_shapes
}
// === Arc geometry ===
#[derive(Debug, Copy, Clone, PartialEq)]
struct ArcGeometry {
start: f32,
sector: f32,
}
impl ArcGeometry {
fn bisection(a: f32, b: f32, c: f32) -> [Self; 2] {
[Self::new_minor(a, b), Self::new_minor(b, c)]
}
fn new_minor(a: f32, b: f32) -> Self {
let start = minor_arc_start(a, b);
let sector = minor_arc_sector(a, b);
Self { start, sector }
}
}
fn minor_arc_start(a: f32, b: f32) -> f32 {
let a = a.rem_euclid(TAU);
let b = b.rem_euclid(TAU);
let wrapped = (a - b).abs() >= PI;
if wrapped {
if a < f32::EPSILON {
b
} else {
a
}
} else {
min(a, b)
}
}
fn minor_arc_sector(a: f32, b: f32) -> f32 {
let a = a.abs();
let b = b.abs();
let ab = (a - b).abs();
min(ab, TAU - ab)
}
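// Sketch tests for the angle helpers above; the angle values are illustrative
// and assume only the behavior visible in this file.
#[cfg(test)]
mod arc_geometry_sketch {
use super::*;
#[test]
fn bisection_shares_the_split_angle() {
let [first, second] = ArcGeometry::bisection(0.2, 0.6, 1.0);
assert!((first.start - 0.2).abs() < 1e-5);
assert!((first.sector - 0.4).abs() < 1e-5);
assert!((second.start - 0.6).abs() < 1e-5);
assert!((second.sector - 0.4).abs() < 1e-5);
}
#[test]
fn wrapped_arcs_measure_the_short_way_around() {
// 0.1 rad and TAU - 0.1 rad are only 0.2 rad apart across the wrap.
assert!((minor_arc_sector(0.1, TAU - 0.1) - 0.2).abs() < 1e-5);
assert!((minor_arc_start(0.0, TAU - 0.1) - (TAU - 0.1)).abs() < 1e-5);
}
}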
| redraw_hover_sections | identifier_name |
render.rs | //! Definitions, constructors, and management for the EnsoGL shapes that are used to draw an edge.
//!
//! The core function of this module is to translate edge layouts into the shape parameters that
//! will implement them.
use crate::prelude::*;
use ensogl::display::shape::*;
use crate::GraphLayers;
use super::layout::Corner;
use super::layout::EdgeSplit;
use super::layout::Oriented;
use super::layout::SplitArc;
use super::layout::TargetAttachment;
use ensogl::data::color;
use ensogl::display;
use ensogl::display::scene::Scene;
use std::f32::consts::FRAC_PI_2;
use std::f32::consts::PI;
use std::f32::consts::TAU;
// =================
// === Constants ===
// =================
const LINE_WIDTH: f32 = 4.0;
const HOVER_EXTENSION: f32 = 10.0;
pub(super) const HOVER_WIDTH: f32 = LINE_WIDTH + HOVER_EXTENSION;
mod arrow {
use super::*;
pub(super) const SIZE: Vector2 = Vector2(18.75, 18.75);
}
mod attachment {
/// Extra length to add to the top and bottom of the target-attachment bit, to ensure that it
/// appears to pass through the top of the node. Without this adjustment, inexact
/// floating-point math and anti-aliasing would cause a 1-pixel gap artifact right where
/// the attachment should meet the corner at the edge of the node.
pub(super) const LENGTH_ADJUSTMENT: f32 = 0.1;
}
// ===================
// === Edge Shapes ===
// ===================
/// The shapes used to render an edge.
#[derive(Debug, Default)]
pub(super) struct Shapes {
/// The individual [`Corner`]s making up the edge. Each is drawn in the focused or unfocused
/// color.
sections: RefCell<Vec<Rectangle>>,
/// A pair of [`arc`] shapes used when the mouse is over the rounded corner, and the edge must
/// be split into focused and unfocused sides at a certain angle along the arc.
split_arc: RefCell<Option<[arc::View; 2]>>,
/// Wider versions of the [`sections`], for receiving mouse events.
hover_sections: RefCell<Vec<Rectangle>>,
/// The end of the edge that is drawn on top of the node and connects to the target node's
/// input port.
target_attachment: RefCell<Option<Rectangle>>,
/// Arrow drawn on long backward edges to indicate data flow direction.
dataflow_arrow: RefCell<Option<Rectangle>>,
/// A rectangle representing the source node shape when the edge is in the detached state. Used
/// to mask out the edge fragment that would otherwise be drawn over the source node.
source_cutout: RefCell<Option<Rectangle>>,
}
impl Shapes {
/// Redraw the arrow used to mark long backward edges.
pub(super) fn redraw_dataflow_arrow(
&self,
parent: &impl ShapeParent,
parameters: RedrawDataflowArrow,
) {
let RedrawDataflowArrow { arrow, source_color, target_color, focus_split, is_attached } =
parameters;
let shape = self.dataflow_arrow.take();
if let Some(arrow_center) = arrow {
// The arrow will have the same color as the target-end of the first corner from the
// source (this is the `arrow_center` point).
let color = match focus_split.map(|split| split.corner_index) {
Some(0) => target_color,
_ => source_color,
};
let shape = shape.unwrap_or_else(|| parent.new_dataflow_arrow());
shape.set_xy(arrow_center - arrow::SIZE / 2.0);
shape.set_color(color);
Self::set_layer(parent, &shape, is_attached, false);
self.dataflow_arrow.replace(Some(shape));
}
}
/// Redraw the invisible mouse-event-catching edges.
pub(super) fn redraw_hover_sections(
&self,
parent: &impl ShapeParent,
corners: &[Oriented<Corner>],
) {
let hover_factory = self
.hover_sections
.take()
.into_iter()
.chain(iter::repeat_with(|| parent.new_hover_section()));
*self.hover_sections.borrow_mut() = corners
.iter()
.zip(hover_factory)
.map(|(corner, shape)| draw_corner(shape, **corner, INVISIBLE_HOVER_COLOR, HOVER_WIDTH))
.collect();
}
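// The take-then-chain pattern above recycles previously allocated shapes and only
// allocates new ones when the corner count grows. A standalone sketch of the same
// idiom (illustrative only; `String` stands in for `Rectangle`):
//
// let old: Vec<String> = vec!["stale".into()];
// let factory = old.into_iter().chain(std::iter::repeat_with(String::new));
// let new: Vec<String> = ["x", "y"].into_iter()
//     .zip(factory)
//     .map(|(label, mut shape)| { shape.clear(); shape.push_str(label); shape })
//     .collect();
// assert_eq!(new, ["x", "y"]);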
/// Redraw the sections, each of which is a [`Rectangle`] implementing a [`Corner`], or multiple
/// [`Rectangle`]s and multiple [`arc::View`]s, if it is a split [`Corner`].
pub(super) fn redraw_sections(&self, parent: &impl ShapeParent, parameters: RedrawSections) {
let RedrawSections { corners, source_color, target_color, focus_split, is_attached } =
parameters;
let corner_index =
focus_split.map(|split| split.corner_index).unwrap_or_else(|| corners.len());
let split_corner = focus_split.map(|split| split.split_corner);
let mut section_factory =
self.sections.take().into_iter().chain(iter::repeat_with(|| parent.new_section()));
let mut new_sections = self.redraw_complete_sections(
&mut section_factory,
corners,
corner_index,
source_color,
target_color,
);
let arc_shapes = self.split_arc.take();
if let Some(split_corner) = split_corner {
if let Some(split_arc) = split_corner.split_arc {
let arc_shapes = arc_shapes.unwrap_or_else(|| [parent.new_arc(), parent.new_arc()]);
let arc_shapes = draw_split_arc(arc_shapes, split_arc);
arc_shapes[0].color.set(source_color.into());
arc_shapes[1].color.set(target_color.into());
self.split_arc.replace(Some(arc_shapes));
}
let (source_shape, target_shape) =
(section_factory.next().unwrap(), section_factory.next().unwrap());
new_sections.extend([
draw_corner(source_shape, *split_corner.source_end, source_color, LINE_WIDTH),
draw_corner(target_shape, *split_corner.target_end, target_color, LINE_WIDTH),
]);
}
for (i, shape) in new_sections.iter().enumerate() {
Self::set_layer(parent, shape, is_attached, i == 0);
}
*self.sections.borrow_mut() = new_sections;
}
pub(crate) fn redraw_cutout(
&self,
parent: &impl ShapeParent,
is_attached: bool,
source_size: Vector2,
) {
let cutout = self.source_cutout.take();
if !is_attached {
let cutout = cutout.unwrap_or_else(|| parent.new_cutout());
cutout.set_xy(-source_size / 2.0);
cutout.set_size(source_size);
self.source_cutout.replace(Some(cutout));
}
}
/// Redraw the sections that aren't split by the focus position.
pub(super) fn redraw_complete_sections(
&self,
section_factory: impl Iterator<Item = Rectangle>,
corners: &[Oriented<Corner>],
corner_index: usize,
source_color: color::Rgba,
target_color: color::Rgba,
) -> Vec<Rectangle> {
corners
.iter()
.enumerate()
.filter_map(|(i, corner)| {
if i == corner_index {
None
} else {
let color = match i < corner_index {
true => source_color,
false => target_color,
};
Some((color, corner))
}
})
.zip(section_factory)
.map(|((color, corner), shape)| draw_corner(shape, **corner, color, LINE_WIDTH))
.collect()
}
/// Redraw the little bit that goes on top of the target node.
pub(super) fn redraw_target_attachment(
&self,
parent: &impl ShapeParent,
target_attachment: Option<TargetAttachment>,
color: color::Rgba,
) {
let shape = self.target_attachment.take();
if let Some(TargetAttachment { target, length }) = target_attachment
&& length > f32::EPSILON {
let shape = shape.unwrap_or_else(|| parent.new_target_attachment());
shape.set_size_y(length + attachment::LENGTH_ADJUSTMENT * 2.0);
let offset = Vector2(-LINE_WIDTH / 2.0, -length - attachment::LENGTH_ADJUSTMENT);
shape.set_xy(target + offset);
shape.set_color(color);
self.target_attachment.replace(Some(shape));
}
}
/// Add the given shape to the appropriate layer depending on whether it is attached.
fn set_layer(
parent: &impl ShapeParent,
shape: &Rectangle,
below_nodes: bool,
near_source: bool,
) {
let layers = parent.layers();
let layer = if below_nodes {
&layers.edge_below_nodes
} else if near_source {
&layers.masked_edge_above_nodes
} else {
&layers.edge_above_nodes
};
layer.add(shape);
}
}
// === Redraw parameters ====
/// Arguments passed to [`Shapes::redraw_sections`].
pub(super) struct RedrawSections<'a> {
/// The corners to be redrawn.
pub(super) corners: &'a [Oriented<Corner>],
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
/// Arguments passed to [`Shapes::redraw_dataflow_arrow`].
pub(super) struct RedrawDataflowArrow {
/// The center of the arrow, if the arrow should be drawn.
pub(super) arrow: Option<Vector2>,
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
// =========================
// === Shape Definitions ===
// =========================
/// An arc around the origin. `outer_radius` determines the distance from the origin to the outer
/// edge of the arc, `stroke_width` the width of the arc. The arc starts at `start_angle`, relative
/// to the origin. Its radial size is `sector_angle`. The ends are flat, not rounded as in
/// [`RoundedArc`].
mod arc {
use super::*;
ensogl::shape! {
pointer_events = false;
(
style: Style,
color: Vector4,
outer_radius: f32,
stroke_width: f32,
start_angle: f32,
sector_angle: f32,
) {
let circle = Circle(outer_radius.px()) - Circle((outer_radius - stroke_width).px());
let angle_adjust = Var::<f32>::from(FRAC_PI_2);
let rotate_angle = -start_angle + angle_adjust - &sector_angle / 2.0;
let angle = PlaneAngleFast(sector_angle).rotate(rotate_angle);
let angle = angle.grow(0.5.px());
let shape = circle * angle;
let shape = shape.fill(color);
shape.into()
}
}
}
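// Derivation note for `rotate_angle` above (a reading of the math, assuming
// `PlaneAngleFast` produces a sector centered on the reference axis): the
// `FRAC_PI_2` adjustment shifts between the shape system's reference axis and the
// angle convention used by the edge layout, and subtracting `sector_angle / 2`
// re-anchors the centered sector so that it begins at `start_angle` and sweeps
// `sector_angle` from there.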
// ======================
// === Shape Creation ===
// ======================
pub(super) trait ShapeParent: display::Object {
fn scene(&self) -> &Scene;
fn layers(&self) -> &GraphLayers;
/// Create a shape object to render one of the [`Corner`]s making up the edge.
fn new_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(LINE_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
new
}
/// Create a shape object to render the invisible hover area corresponding to one of the
/// [`Corner`]s making up the edge.
fn new_hover_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(HOVER_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
self.display_object().add_child(&new);
self.layers().edge_below_nodes.add(&new);
new
}
/// Create a shape object to render an arbitrary-angle arc. This is used when the focus is split
/// in the rounded part of a [`Corner`].
fn new_arc(&self) -> arc::View {
let arc = arc::View::new();
arc.stroke_width.set(LINE_WIDTH);
self.display_object().add_child(&arc);
self.layers().edge_below_nodes.add(&arc);
arc
}
/// Create a shape object to render the little bit at the target end of the edge, that draws on
/// top of the node.
fn new_target_attachment(&self) -> Rectangle {
let new = Rectangle::new();
new.set_size_x(LINE_WIDTH);
new.set_border_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
self.layers().edge_above_nodes.add(&new);
new
}
/// Create a shape object to render the arrow that is drawn on long backward edges to show the
/// direction of data flow.
fn new_dataflow_arrow(&self) -> Rectangle |
/// Create a shape object to render the cutout mask for the edge nearby the source node.
fn new_cutout(&self) -> Rectangle {
let cutout = Rectangle::new();
self.display_object().add_child(&cutout);
// FIXME (temporary assumption): Currently we assume that the node background is a rectangle
// that always has rounded corners. Ideally we would somehow use the actual source node's
// background shape for this.
cutout.set_corner_radius(crate::component::node::CORNER_RADIUS);
self.layers().edge_above_nodes_cutout.add(&cutout);
// Pointer events must be enabled, so that the hover area is masked out as well.
cutout.set_pointer_events(true);
cutout
}
}
// =========================
// === Rendering Corners ===
// =========================
/// Set the given [`Rectangle`]'s geometry to draw this corner shape.
///
/// Note that the shape's `inset` and `border` should be the same value as the provided
/// [`line_width`]. They are not set here as an optimization: When shapes are reused, the value does
/// not need to be set again, reducing needed GPU uploads.
pub(super) fn draw_corner(
shape: Rectangle,
corner: Corner,
color: color::Rgba,
line_width: f32,
) -> Rectangle {
shape.set_xy(corner.origin(line_width));
shape.set_size(corner.size(line_width));
shape.set_clip(corner.clip());
shape.set_corner_radius(corner.radius(line_width));
shape.set_border_color(color);
shape
}
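// A reading of the setters above (inferred intent, not documented API): the
// `Rectangle` is drawn as a rounded-rectangle border of width `line_width`, and
// `set_clip` keeps only the quadrant containing the rounded bend, so each `Corner`
// renders as a single horizontal-to-vertical turn of the edge.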
// ==============================
// === Rendering Partial Arcs ===
// ==============================
/// Apply the specified arc-splitting parameters to the given arc shapes.
pub(super) fn draw_split_arc(arc_shapes: [arc::View; 2], split_arc: SplitArc) -> [arc::View; 2] {
let outer_radius = split_arc.radius + LINE_WIDTH / 2.0;
let arc_box = Vector2(outer_radius * 2.0, outer_radius * 2.0);
let arc_offset = Vector2(-outer_radius, -outer_radius);
let geometry = ArcGeometry::bisection(
split_arc.source_end_angle,
split_arc.split_angle,
split_arc.target_end_angle,
);
for (shape, geometry) in arc_shapes.iter().zip(&geometry) {
shape.set_xy(split_arc.origin + arc_offset);
shape.set_size(arc_box);
shape.outer_radius.set(outer_radius);
shape.start_angle.set(geometry.start);
shape.sector_angle.set(geometry.sector);
}
arc_shapes
}
// === Arc geometry ===
#[derive(Debug, Copy, Clone, PartialEq)]
struct ArcGeometry {
start: f32,
sector: f32,
}
impl ArcGeometry {
fn bisection(a: f32, b: f32, c: f32) -> [Self; 2] {
[Self::new_minor(a, b), Self::new_minor(b, c)]
}
fn new_minor(a: f32, b: f32) -> Self {
let start = minor_arc_start(a, b);
let sector = minor_arc_sector(a, b);
Self { start, sector }
}
}
fn minor_arc_start(a: f32, b: f32) -> f32 {
let a = a.rem_euclid(TAU);
let b = b.rem_euclid(TAU);
let wrapped = (a - b).abs() >= PI;
if wrapped {
if a < f32::EPSILON {
b
} else {
a
}
} else {
min(a, b)
}
}
fn minor_arc_sector(a: f32, b: f32) -> f32 {
let a = a.abs();
let b = b.abs();
let ab = (a - b).abs();
min(ab, TAU - ab)
}
| {
let new = SimpleTriangle::from_size(arrow::SIZE);
new.set_pointer_events(false);
self.display_object().add_child(&new);
new.into()
} | identifier_body |
render.rs | //! Definitions, constructors, and management for the EnsoGL shapes that are used to draw an edge.
//!
//! The core function of this module is to translate edge layouts into the shape parameters that
//! will implement them.
use crate::prelude::*;
use ensogl::display::shape::*;
use crate::GraphLayers;
use super::layout::Corner;
use super::layout::EdgeSplit;
use super::layout::Oriented;
use super::layout::SplitArc;
use super::layout::TargetAttachment;
use ensogl::data::color;
use ensogl::display;
use ensogl::display::scene::Scene;
use std::f32::consts::FRAC_PI_2;
use std::f32::consts::PI;
use std::f32::consts::TAU;
// =================
// === Constants ===
// =================
const LINE_WIDTH: f32 = 4.0;
const HOVER_EXTENSION: f32 = 10.0;
pub(super) const HOVER_WIDTH: f32 = LINE_WIDTH + HOVER_EXTENSION;
mod arrow {
use super::*;
pub(super) const SIZE: Vector2 = Vector2(18.75, 18.75);
}
mod attachment {
/// Extra length to add to the top and bottom of the target-attachment bit, to ensure that it
/// appears to pass through the top of the node. Without this adjustment, inexact
/// floating-point math and anti-aliasing would cause a 1-pixel gap artifact right where
/// the attachment should meet the corner at the edge of the node.
pub(super) const LENGTH_ADJUSTMENT: f32 = 0.1;
}
// ===================
// === Edge Shapes ===
// ===================
/// The shapes used to render an edge.
#[derive(Debug, Default)]
pub(super) struct Shapes {
/// The individual [`Corner`]s making up the edge. Each is drawn in the focused or unfocused
/// color.
sections: RefCell<Vec<Rectangle>>,
/// A pair of [`arc`] shapes used when the mouse is over the rounded corner, and the edge
/// must be split into focused and unfocused sides at a certain angle along the arc.
split_arc: RefCell<Option<[arc::View; 2]>>,
/// Wider versions of the [`sections`], for receiving mouse events.
hover_sections: RefCell<Vec<Rectangle>>,
/// The end of the edge that is drawn on top of the node and connects to the target node's
/// input port.
target_attachment: RefCell<Option<Rectangle>>,
/// Arrow drawn on long backward edges to indicate data flow direction.
dataflow_arrow: RefCell<Option<Rectangle>>,
/// A rectangle representing the source node shape when the edge is in the detached state. Used
/// to mask out the edge fragment that would otherwise be drawn over the source node.
source_cutout: RefCell<Option<Rectangle>>,
}
impl Shapes {
/// Redraw the arrow used to mark long backward edges.
pub(super) fn redraw_dataflow_arrow(
&self,
parent: &impl ShapeParent,
parameters: RedrawDataflowArrow,
) {
let RedrawDataflowArrow { arrow, source_color, target_color, focus_split, is_attached } =
parameters;
let shape = self.dataflow_arrow.take();
if let Some(arrow_center) = arrow {
// The arrow will have the same color as the target-end of the first corner from the
// source (this is the `arrow_center` point).
let color = match focus_split.map(|split| split.corner_index) {
Some(0) => target_color,
_ => source_color,
};
let shape = shape.unwrap_or_else(|| parent.new_dataflow_arrow());
shape.set_xy(arrow_center - arrow::SIZE / 2.0);
shape.set_color(color);
Self::set_layer(parent, &shape, is_attached, false);
self.dataflow_arrow.replace(Some(shape));
}
}
/// Redraw the invisible mouse-event-catching edges.
pub(super) fn redraw_hover_sections(
&self,
parent: &impl ShapeParent,
corners: &[Oriented<Corner>],
) {
let hover_factory = self
.hover_sections
.take()
.into_iter()
.chain(iter::repeat_with(|| parent.new_hover_section()));
*self.hover_sections.borrow_mut() = corners
.iter()
.zip(hover_factory)
.map(|(corner, shape)| draw_corner(shape, **corner, INVISIBLE_HOVER_COLOR, HOVER_WIDTH))
.collect();
}
/// Redraw the sections, each of which is a [`Rectangle`] implementing a [`Corner`], or multiple
/// [`Rectangle`]s and multiple [`arc::View`]s, if it is a split [`Corner`].
pub(super) fn redraw_sections(&self, parent: &impl ShapeParent, parameters: RedrawSections) {
let RedrawSections { corners, source_color, target_color, focus_split, is_attached } =
parameters;
let corner_index =
focus_split.map(|split| split.corner_index).unwrap_or_else(|| corners.len());
let split_corner = focus_split.map(|split| split.split_corner);
let mut section_factory =
self.sections.take().into_iter().chain(iter::repeat_with(|| parent.new_section()));
let mut new_sections = self.redraw_complete_sections(
&mut section_factory,
corners,
corner_index,
source_color,
target_color,
);
let arc_shapes = self.split_arc.take();
if let Some(split_corner) = split_corner {
if let Some(split_arc) = split_corner.split_arc {
let arc_shapes = arc_shapes.unwrap_or_else(|| [parent.new_arc(), parent.new_arc()]);
let arc_shapes = draw_split_arc(arc_shapes, split_arc);
arc_shapes[0].color.set(source_color.into());
arc_shapes[1].color.set(target_color.into());
self.split_arc.replace(Some(arc_shapes));
}
let (source_shape, target_shape) =
(section_factory.next().unwrap(), section_factory.next().unwrap());
new_sections.extend([
draw_corner(source_shape, *split_corner.source_end, source_color, LINE_WIDTH),
draw_corner(target_shape, *split_corner.target_end, target_color, LINE_WIDTH),
]);
}
for (i, shape) in new_sections.iter().enumerate() {
Self::set_layer(parent, shape, is_attached, i == 0);
}
*self.sections.borrow_mut() = new_sections;
}
pub(crate) fn redraw_cutout(
&self,
parent: &impl ShapeParent,
is_attached: bool,
source_size: Vector2,
) {
let cutout = self.source_cutout.take();
if !is_attached {
let cutout = cutout.unwrap_or_else(|| parent.new_cutout());
cutout.set_xy(-source_size / 2.0);
cutout.set_size(source_size);
self.source_cutout.replace(Some(cutout));
}
}
/// Redraw the sections that aren't split by the focus position.
pub(super) fn redraw_complete_sections(
&self,
section_factory: impl Iterator<Item = Rectangle>,
corners: &[Oriented<Corner>],
corner_index: usize,
source_color: color::Rgba,
target_color: color::Rgba,
) -> Vec<Rectangle> {
corners
.iter()
.enumerate()
.filter_map(|(i, corner)| {
if i == corner_index {
None
} else {
let color = match i < corner_index {
true => source_color,
false => target_color,
};
Some((color, corner))
}
})
.zip(section_factory)
.map(|((color, corner), shape)| draw_corner(shape, **corner, color, LINE_WIDTH))
.collect()
}
/// Redraw the little bit that goes on top of the target node.
pub(super) fn redraw_target_attachment(
&self,
parent: &impl ShapeParent,
target_attachment: Option<TargetAttachment>,
color: color::Rgba,
) {
let shape = self.target_attachment.take();
if let Some(TargetAttachment { target, length }) = target_attachment
&& length > f32::EPSILON {
let shape = shape.unwrap_or_else(|| parent.new_target_attachment());
shape.set_size_y(length + attachment::LENGTH_ADJUSTMENT * 2.0);
let offset = Vector2(-LINE_WIDTH / 2.0, -length - attachment::LENGTH_ADJUSTMENT);
shape.set_xy(target + offset);
shape.set_color(color);
self.target_attachment.replace(Some(shape));
}
}
/// Add the given shape to the appropriate layer depending on whether it is attached.
fn set_layer(
parent: &impl ShapeParent,
shape: &Rectangle,
below_nodes: bool,
near_source: bool,
) {
let layers = parent.layers();
let layer = if below_nodes {
&layers.edge_below_nodes
} else if near_source {
&layers.masked_edge_above_nodes
} else {
&layers.edge_above_nodes
};
layer.add(shape);
}
}
// === Redraw parameters ====
/// Arguments passed to [`Shapes::redraw_sections`].
pub(super) struct RedrawSections<'a> {
/// The corners to be redrawn.
pub(super) corners: &'a [Oriented<Corner>],
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
/// Arguments passed to [`Shapes::redraw_dataflow_arrow`].
pub(super) struct RedrawDataflowArrow {
/// The center of the arrow, if the arrow should be drawn.
pub(super) arrow: Option<Vector2>,
/// The color to use for the part of the edge closer to the source.
pub(super) source_color: color::Rgba,
/// The color to use for the part of the edge closer to the target.
pub(super) target_color: color::Rgba,
/// Where the edge should be split into differently-colored source and target parts.
pub(super) focus_split: Option<EdgeSplit>,
/// Whether the edge is fully-attached.
pub(super) is_attached: bool,
}
// =========================
// === Shape Definitions ===
// =========================
/// An arc around the origin. `outer_radius` determines the distance from the origin to the outer
/// edge of the arc, `stroke_width` the width of the arc. The arc starts at `start_angle`, relative
/// to the origin. Its radial size is `sector_angle`. The ends are flat, not rounded as in
/// [`RoundedArc`].
mod arc {
use super::*;
ensogl::shape! {
pointer_events = false;
(
style: Style,
color: Vector4,
outer_radius: f32,
stroke_width: f32,
start_angle: f32,
sector_angle: f32,
) {
let circle = Circle(outer_radius.px()) - Circle((outer_radius - stroke_width).px());
let angle_adjust = Var::<f32>::from(FRAC_PI_2);
let rotate_angle = -start_angle + angle_adjust - &sector_angle / 2.0;
let angle = PlaneAngleFast(sector_angle).rotate(rotate_angle);
let angle = angle.grow(0.5.px());
let shape = circle * angle;
let shape = shape.fill(color);
shape.into()
}
}
}
// ======================
// === Shape Creation ===
// ======================
pub(super) trait ShapeParent: display::Object {
fn scene(&self) -> &Scene;
fn layers(&self) -> &GraphLayers;
/// Create a shape object to render one of the [`Corner`]s making up the edge.
fn new_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(LINE_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
new
}
/// Create a shape object to render the invisible hover area corresponding to one of the
/// [`Corner`]s making up the edge.
fn new_hover_section(&self) -> Rectangle {
let new = Rectangle::new();
new.set_inner_border(HOVER_WIDTH, 0.0);
new.set_color(color::Rgba::transparent());
self.display_object().add_child(&new);
self.layers().edge_below_nodes.add(&new);
new
}
/// Create a shape object to render an arbitrary-angle arc. This is used when the focus is split
/// in the rounded part of a [`Corner`].
fn new_arc(&self) -> arc::View {
let arc = arc::View::new();
arc.stroke_width.set(LINE_WIDTH);
self.display_object().add_child(&arc);
self.layers().edge_below_nodes.add(&arc);
arc
}
/// Create a shape object to render the little bit at the target end of the edge, that draws on
/// top of the node.
fn new_target_attachment(&self) -> Rectangle {
let new = Rectangle::new();
new.set_size_x(LINE_WIDTH);
new.set_border_color(color::Rgba::transparent());
new.set_pointer_events(false);
self.display_object().add_child(&new);
self.layers().edge_above_nodes.add(&new);
new
}
/// Create a shape object to render the arrow that is drawn on long backward edges to show the
/// direction of data flow.
fn new_dataflow_arrow(&self) -> Rectangle {
let new = SimpleTriangle::from_size(arrow::SIZE);
new.set_pointer_events(false);
self.display_object().add_child(&new);
new.into()
}
/// Create a shape object to render the cutout mask for the edge nearby the source node.
fn new_cutout(&self) -> Rectangle {
let cutout = Rectangle::new();
self.display_object().add_child(&cutout);
// FIXME (temporary assumption): Currently we assume that the node background is a rectangle
// that always has rounded corners. Ideally we would somehow use the actual source node's
// background shape for this.
cutout.set_corner_radius(crate::component::node::CORNER_RADIUS);
self.layers().edge_above_nodes_cutout.add(&cutout);
// Pointer events must be enabled, so that the hover area is masked out as well.
cutout.set_pointer_events(true);
cutout
}
}
// =========================
// === Rendering Corners ===
// =========================
/// Set the given [`Rectangle`]'s geometry to draw this corner shape.
///
/// Note that the shape's `inset` and `border` should be the same value as the provided
/// [`line_width`]. They are not set here as an optimization: When shapes are reused, the value does
/// not need to be set again, reducing needed GPU uploads.
pub(super) fn draw_corner(
shape: Rectangle,
corner: Corner,
color: color::Rgba,
line_width: f32,
) -> Rectangle {
shape.set_xy(corner.origin(line_width));
shape.set_size(corner.size(line_width));
shape.set_clip(corner.clip());
shape.set_corner_radius(corner.radius(line_width));
shape.set_border_color(color);
shape
}
// ==============================
// === Rendering Partial Arcs ===
// ==============================
/// Apply the specified arc-splitting parameters to the given arc shapes.
pub(super) fn draw_split_arc(arc_shapes: [arc::View; 2], split_arc: SplitArc) -> [arc::View; 2] {
let outer_radius = split_arc.radius + LINE_WIDTH / 2.0;
let arc_box = Vector2(outer_radius * 2.0, outer_radius * 2.0);
let arc_offset = Vector2(-outer_radius, -outer_radius);
let geometry = ArcGeometry::bisection(
split_arc.source_end_angle,
split_arc.split_angle,
split_arc.target_end_angle,
);
for (shape, geometry) in arc_shapes.iter().zip(&geometry) {
shape.set_xy(split_arc.origin + arc_offset);
shape.set_size(arc_box);
shape.outer_radius.set(outer_radius);
shape.start_angle.set(geometry.start);
shape.sector_angle.set(geometry.sector);
}
arc_shapes
}
// === Arc geometry ===
#[derive(Debug, Copy, Clone, PartialEq)]
struct ArcGeometry {
start: f32,
sector: f32,
}
impl ArcGeometry {
fn bisection(a: f32, b: f32, c: f32) -> [Self; 2] {
[Self::new_minor(a, b), Self::new_minor(b, c)]
}
fn new_minor(a: f32, b: f32) -> Self {
let start = minor_arc_start(a, b);
let sector = minor_arc_sector(a, b);
Self { start, sector }
}
}
fn minor_arc_start(a: f32, b: f32) -> f32 {
let a = a.rem_euclid(TAU);
let b = b.rem_euclid(TAU);
let wrapped = (a - b).abs() >= PI;
if wrapped {
if a < f32::EPSILON {
b
} else |
} else {
min(a, b)
}
}
fn minor_arc_sector(a: f32, b: f32) -> f32 {
let a = a.abs();
let b = b.abs();
let ab = (a - b).abs();
min(ab, TAU - ab)
}
| {
a
} | conditional_block |
worker_actions.rs | //! Tasks and task execution of workers
//!
//! Note: This module and its submodules will sooner or later need some refactoring.
//! For now, I still don't really know how I want it to look.
mod worker_abilities;
mod worker_updates;
use crate::db::DB;
use crate::game_master::event::*;
use crate::game_master::town_worker::*;
use crate::town_view::*;
use actix::prelude::*;
use chrono::offset::TimeZone;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use paddlers_shared_lib::api::tasks::*;
use paddlers_shared_lib::game_mechanics::worker::*;
use paddlers_shared_lib::prelude::*;
use worker_abilities::*;
use worker_updates::MutWorkerDBEntity;
trait WorkerAction {
fn x(&self) -> i32;
fn y(&self) -> i32;
fn task_type(&self) -> &TaskType;
fn target(&self) -> Option<HoboKey>;
}
pub struct ValidatedTaskList {
pub new_tasks: Vec<NewTask>,
pub update_tasks: Vec<Task>,
pub village_id: VillageKey,
}
pub(crate) fn validate_task_list(
db: &DB,
tl: &TaskList,
) -> Result<ValidatedTaskList, Box<dyn std::error::Error>> {
let worker_id = tl.worker_id;
// Load relevant data into memory
let mut worker = db.worker_priv(worker_id).ok_or("Worker does not exist")?;
let village_id = VillageKey(worker.home);
let mut town = TownView::load_village(db, village_id);
// check timing and effect of current task interruption
let mut current_task = db
.current_task(worker.key())
.expect("Must have a current task");
let mut timestamp =
interrupt_task(&mut current_task, &worker).ok_or("Cannot interrupt current task.")?;
worker.x = current_task.x;
worker.y = current_task.y;
// iterate tasks and match for task types
let mut tasks = vec![];
for task in tl.tasks.iter() {
// Validate target hobo exists if there is one
if let Some(target_id) = task.target {
db.hobo(HoboKey(target_id)).ok_or("No such hobo id")?;
}
validate_ability(db, task.task_type, worker_id, timestamp)?;
let new_task = NewTask {
worker_id: worker_id.num(),
task_type: task.task_type,
x: task.x as i32,
y: task.y as i32,
start_time: Some(timestamp),
target_hobo_id: task.target,
};
simulate_begin_task(&new_task, &mut town, &mut worker)?;
let duration = simulate_finish_task(&new_task, &mut town, &mut worker)?;
tasks.push(new_task);
timestamp += duration;
}
Ok(ValidatedTaskList {
new_tasks: tasks,
update_tasks: vec![current_task],
village_id,
})
}
pub(crate) fn replace_worker_tasks(
db: &DB,
worker: &Addr<TownWorker>,
worker_id: WorkerKey,
tasks: &[NewTask],
village_id: VillageKey,
) {
db.flush_task_queue(worker_id);
let _inserted = db.insert_tasks(tasks);
let current_task =
execute_worker_tasks(db, worker_id, village_id).expect("Worker has no current task");
if let Some(next_task) = db.earliest_future_task(worker_id) {
let event = Event::WorkerTask {
task_id: current_task.key(),
};
worker
.send(TownWorkerEventMsg(
event,
Utc.from_utc_datetime(&next_task.start_time),
))
.wait()
.expect("Send msg to actor");
}
}
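// End-to-end sketch of how the two functions above are combined (hypothetical call
// site; `db`, `town_worker_addr`, and `task_list` are assumed to exist there):
//
// let validated = validate_task_list(&db, &task_list)?;
// replace_worker_tasks(
//     &db,
//     &town_worker_addr,
//     task_list.worker_id,
//     &validated.new_tasks,
//     validated.village_id,
// );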
fn interrupt_task(current_task: &mut Task, worker: &Worker) -> Option<NaiveDateTime> {
match current_task.task_type {
TaskType::Idle
| TaskType::ChopTree
| TaskType::Defend
| TaskType::GatherSticks
| TaskType::CollectReward => {
let now = chrono::Utc::now().naive_utc();
Some(now)
}
TaskType::Walk => {
let speed = unit_speed_to_worker_tiles_per_second(worker.speed) as f64;
let time_so_far: Duration = Utc::now().naive_utc() - current_task.start_time;
let steps = (speed * time_so_far.num_microseconds().unwrap() as f64 / 1_000_000.0)
.ceil() as i32;
let total_time = steps as f64 / speed;
let moment = current_task.start_time
+ chrono::Duration::microseconds((total_time * 1_000_000.0) as i64);
let dx = current_task.x - worker.x;
let dy = current_task.y - worker.y;
let x = if dx == 0 {
worker.x
} else if dx < 0 {
worker.x - steps
} else {
worker.x + steps
};
let y = if dy == 0 {
worker.y
} else if dy < 0 {
worker.y - steps
} else {
worker.y + steps
};
// Walking must terminate earlier
current_task.x = x;
current_task.y = y;
Some(moment)
}
TaskType::WelcomeAbility => {
let cast_time = current_task.start_time + AbilityType::Welcome.busy_duration();
Some(cast_time)
}
}
}
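// Worked example of the walk branch above (illustrative numbers): at 2.0 tiles/s
// with 0.7 s elapsed, steps = ceil(2.0 * 0.7) = 2 and total_time = 2.0 / 2.0 = 1.0 s,
// so the interruption moment is start_time + 1.0 s and the worker ends up two tiles
// along the walked axis. Rounding the elapsed distance up to a whole step keeps the
// interrupted worker aligned to the tile grid.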
/// For the given worker, executes tasks on the DB that are due
fn execute_worker_tasks(db: &DB, worker_id: WorkerKey, village: VillageKey) -> Option<Task> {
let mut tasks = db.past_worker_tasks(worker_id);
let current_task = tasks.pop();
let mut town = TownView::load_village(db, village);
for task in tasks {
if let Err(e) = finish_task(db, task.key(), Some(task), Some(&mut town)) {
println!("Executing task failed: {}", e)
}
}
current_task
}
pub(crate) fn finish_task(
db: &DB,
task_id: TaskKey,
task: Option<Task>,
town: Option<&mut TownView>,
) -> Result<Option<(Event, DateTime<Utc>)>, Box<dyn std::error::Error>> {
let task = task.or_else(|| db.task(task_id));
if let Some(task) = task {
let mut worker = db
.worker_priv(task.worker())
.ok_or("Task references non-existing worker")?;
if let Some(town) = town {
crate::worker_actions::simulate_finish_task(&task, town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
} else {
let mut town = TownView::load_village(db, VillageKey(worker.home));
crate::worker_actions::simulate_finish_task(&task, &mut town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
}
db.update_worker(&worker);
db.update_worker_flag_timestamp_now(worker.key(), WorkerFlagType::Work);
db.delete_task(&task);
Ok(Event::load_next_worker_task(db, task.worker()))
} else {
// Already executed.
Ok(None)
}
}
fn apply_task_to_db(db: &DB, task: &Task, worker: &mut Worker) -> Result<(), String> {
match task.task_type {
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let (attribute, strength) = a.apply();
let ne = NewEffect {
hobo_id: task.target().ok_or("Ability must have a target")?.num(),
attribute,
strength: Some(strength),
start_time: None, // default = now
};
db.insert_effect(&ne);
db.update_ability_used_timestamp(WorkerKey(worker.id), a);
*worker.mana.as_mut().unwrap() -= AbilityType::Welcome.mana_cost();
}
TaskType::CollectReward => {
if let Some(building) = db.find_building_by_coordinates(task.x, task.y, worker.home()) {
match building.building_type.reward_exp() {
Some(exp) => {
worker.add_exp(exp);
db.delete_building(&building);
}
None => {
return Err(format!(
"Tried to collect {} as reward",
building.building_type
));
}
}
} else {
return Err(format!("No reward to collect at {},{}", task.x, task.y));
}
}
_ => { /* NOP */ }
}
Ok(())
}
/// (Try to) apply changes to village state that happen when a worker stops doing a given task.
/// E.g. remove a unit from a building.
/// Returns the time it takes until the task is actually finished.
fn simulate_finish_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<Duration, String> {
match task.task_type() {
TaskType::Idle => Ok(Duration::milliseconds(0)),
TaskType::Walk => Ok(worker_walk(
town,
worker,
(task.x() as usize, task.y() as usize),
)?),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_end(*task.task_type())
.map_err(|e| e.to_string())?;
worker_out_of_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let duration = a.busy_duration();
Ok(duration)
}
TaskType::CollectReward => {
// Lookup object to be collected, then delete it in TownView
// Note: DB update is separate
let index = (task.x() as usize, task.y() as usize);
town.state.remove(&index);
Ok(Duration::milliseconds(0))
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
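// Contract note (inferred from `validate_task_list`): the returned `Duration` is how
// long the task occupies the worker, so callers schedule the follow-up task at
// `start_time + duration`; a zero duration marks an effectively instantaneous task.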
/// (Try to) apply changes to village state that happen when a worker starts a given task.
/// E.g. add a unit to a building, or pay the required price (only if it is a TownView), ...
fn simulate_begin_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<(), String> {
match task.task_type() {
TaskType::Idle | TaskType::Walk | TaskType::CollectReward => Ok(()),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_begin(*task.task_type())
.map_err(|e| e.to_string())?;
worker_into_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
if let Some(mana) = &mut worker.mana {
let cost = AbilityType::Welcome.mana_cost();
if *mana >= cost {
*mana -= cost;
Ok(())
} else {
Err("Not enough mana".to_owned())
}
} else {
Err("Worker has no mana but tries to use welcome ability".to_owned())
}
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
impl WorkerAction for NewTask {
fn x(&self) -> i32 {
self.x
}
fn | (&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
impl WorkerAction for Task {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
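// Because both `NewTask` and `Task` implement `WorkerAction`, the simulation code
// above is written once over `T: WorkerAction`. A minimal sketch of the same idea
// (hypothetical helper, not in the original file):
//
// fn tile_of<T: WorkerAction>(task: &T) -> (usize, usize) {
//     (task.x() as usize, task.y() as usize)
// }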
| y | identifier_name |
worker_actions.rs | //! Tasks and task execution of workers
//!
//! Note: This module and its submodules will sooner or later need some refactoring.
//! For now, I still don't really know how I want it to look.
mod worker_abilities;
mod worker_updates;
use crate::db::DB;
use crate::game_master::event::*;
use crate::game_master::town_worker::*;
use crate::town_view::*;
use actix::prelude::*;
use chrono::offset::TimeZone;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use paddlers_shared_lib::api::tasks::*;
use paddlers_shared_lib::game_mechanics::worker::*;
use paddlers_shared_lib::prelude::*;
use worker_abilities::*;
use worker_updates::MutWorkerDBEntity;
trait WorkerAction {
fn x(&self) -> i32;
fn y(&self) -> i32;
fn task_type(&self) -> &TaskType;
fn target(&self) -> Option<HoboKey>;
}
pub struct ValidatedTaskList {
pub new_tasks: Vec<NewTask>,
pub update_tasks: Vec<Task>,
pub village_id: VillageKey,
}
pub(crate) fn validate_task_list(
db: &DB,
tl: &TaskList,
) -> Result<ValidatedTaskList, Box<dyn std::error::Error>> {
let worker_id = tl.worker_id;
// Load relevant data into memory
let mut worker = db.worker_priv(worker_id).ok_or("Worker does not exist")?;
let village_id = VillageKey(worker.home);
let mut town = TownView::load_village(db, village_id);
// check timing and effect of current task interruption
let mut current_task = db
.current_task(worker.key())
.expect("Must have a current task");
let mut timestamp =
interrupt_task(&mut current_task, &worker).ok_or("Cannot interrupt current task.")?;
worker.x = current_task.x;
worker.y = current_task.y;
// iterate tasks and match for task types
let mut tasks = vec![];
for task in tl.tasks.iter() {
// Validate target hobo exists if there is one
if let Some(target_id) = task.target {
db.hobo(HoboKey(target_id)).ok_or("No such hobo id")?;
}
validate_ability(db, task.task_type, worker_id, timestamp)?;
let new_task = NewTask {
worker_id: worker_id.num(),
task_type: task.task_type,
x: task.x as i32,
y: task.y as i32,
start_time: Some(timestamp),
target_hobo_id: task.target,
};
simulate_begin_task(&new_task, &mut town, &mut worker)?;
let duration = simulate_finish_task(&new_task, &mut town, &mut worker)?;
tasks.push(new_task);
timestamp += duration;
}
Ok(ValidatedTaskList {
new_tasks: tasks,
update_tasks: vec![current_task],
village_id,
})
}
pub(crate) fn replace_worker_tasks(
db: &DB,
worker: &Addr<TownWorker>,
worker_id: WorkerKey,
tasks: &[NewTask],
village_id: VillageKey,
) {
db.flush_task_queue(worker_id);
let _inserted = db.insert_tasks(tasks);
let current_task =
execute_worker_tasks(db, worker_id, village_id).expect("Worker has no current task");
if let Some(next_task) = db.earliest_future_task(worker_id) {
let event = Event::WorkerTask {
task_id: current_task.key(),
};
worker
.send(TownWorkerEventMsg(
event,
Utc.from_utc_datetime(&next_task.start_time),
))
.wait()
.expect("Send msg to actor");
}
}
fn interrupt_task(current_task: &mut Task, worker: &Worker) -> Option<NaiveDateTime> {
match current_task.task_type {
TaskType::Idle
| TaskType::ChopTree
| TaskType::Defend
| TaskType::GatherSticks
| TaskType::CollectReward => {
let now = chrono::Utc::now().naive_utc();
Some(now)
}
TaskType::Walk => {
let speed = unit_speed_to_worker_tiles_per_second(worker.speed) as f64;
let time_so_far: Duration = Utc::now().naive_utc() - current_task.start_time;
let steps = (speed * time_so_far.num_microseconds().unwrap() as f64 / 1_000_000.0)
.ceil() as i32;
let total_time = steps as f64 / speed;
let moment = current_task.start_time
+ chrono::Duration::microseconds((total_time * 1_000_000.0) as i64);
let dx = current_task.x - worker.x;
let dy = current_task.y - worker.y;
let x = if dx == 0 {
worker.x
} else if dx < 0 {
worker.x - steps
} else {
worker.x + steps
};
let y = if dy == 0 {
worker.y
} else if dy < 0 {
worker.y - steps
} else {
worker.y + steps
};
// Walking must terminate earlier
current_task.x = x;
current_task.y = y;
Some(moment)
}
TaskType::WelcomeAbility => {
let cast_time = current_task.start_time + AbilityType::Welcome.busy_duration();
Some(cast_time)
}
}
}
/// For the given worker, executes tasks on the DB that are due
fn execute_worker_tasks(db: &DB, worker_id: WorkerKey, village: VillageKey) -> Option<Task> {
let mut tasks = db.past_worker_tasks(worker_id);
let current_task = tasks.pop();
let mut town = TownView::load_village(db, village);
for task in tasks {
if let Err(e) = finish_task(db, task.key(), Some(task), Some(&mut town)) {
println!("Executing task failed: {}", e)
}
}
current_task
}
pub(crate) fn finish_task(
db: &DB,
task_id: TaskKey,
task: Option<Task>,
town: Option<&mut TownView>,
) -> Result<Option<(Event, DateTime<Utc>)>, Box<dyn std::error::Error>> {
let task = task.or_else(|| db.task(task_id));
if let Some(task) = task {
let mut worker = db
.worker_priv(task.worker())
.ok_or("Task references non-existing worker")?;
if let Some(town) = town {
crate::worker_actions::simulate_finish_task(&task, town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
} else {
let mut town = TownView::load_village(db, VillageKey(worker.home));
crate::worker_actions::simulate_finish_task(&task, &mut town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
}
db.update_worker(&worker);
db.update_worker_flag_timestamp_now(worker.key(), WorkerFlagType::Work);
db.delete_task(&task);
Ok(Event::load_next_worker_task(db, task.worker()))
} else {
// Already executed.
Ok(None)
}
}
fn apply_task_to_db(db: &DB, task: &Task, worker: &mut Worker) -> Result<(), String> {
match task.task_type {
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let (attribute, strength) = a.apply();
let ne = NewEffect {
hobo_id: task.target().ok_or("Ability must have a target")?.num(),
attribute,
strength: Some(strength),
start_time: None, // default = now
};
db.insert_effect(&ne);
db.update_ability_used_timestamp(WorkerKey(worker.id), a);
*worker.mana.as_mut().unwrap() -= AbilityType::Welcome.mana_cost();
}
TaskType::CollectReward => {
if let Some(building) = db.find_building_by_coordinates(task.x, task.y, worker.home()) {
match building.building_type.reward_exp() {
Some(exp) => {
worker.add_exp(exp);
db.delete_building(&building);
}
None => {
return Err(format!(
"Tried to collect {} as reward",
building.building_type
));
}
}
} else {
return Err(format!("No reward to collect at {},{}", task.x, task.y));
}
}
_ => { /* NOP */ }
}
Ok(())
}
/// (Try to) apply changes to village state that happen when a worker stops doing a given task.
/// E.g. remove a unit from a building.
/// Returns the time it takes until the task is actually finished.
fn simulate_finish_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<Duration, String> {
match task.task_type() {
TaskType::Idle => Ok(Duration::milliseconds(0)),
TaskType::Walk => Ok(worker_walk(
town,
worker,
(task.x() as usize, task.y() as usize),
)?),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_end(*task.task_type())
.map_err(|e| e.to_string())?;
worker_out_of_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let duration = a.busy_duration();
Ok(duration)
}
TaskType::CollectReward => {
// Lookup object to be collected, then delete it in TownView
// Note: DB update is separate
let index = (task.x() as usize, task.y() as usize);
town.state.remove(&index);
Ok(Duration::milliseconds(0))
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
/// (Try to) apply changes to village state that happen when a worker starts a given task.
/// E.g. add a unit to a building, or pay the required price (only if it is a TownView), ...
fn simulate_begin_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<(), String> {
match task.task_type() {
TaskType::Idle | TaskType::Walk | TaskType::CollectReward => Ok(()),
TaskType::GatherSticks | TaskType::ChopTree => |
TaskType::WelcomeAbility => {
if let Some(mana) = &mut worker.mana {
let cost = AbilityType::Welcome.mana_cost();
if *mana >= cost {
*mana -= cost;
Ok(())
} else {
Err("Not enough mana".to_owned())
}
} else {
Err("Worker has no mana but tries to use welcome ability".to_owned())
}
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
impl WorkerAction for NewTask {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
impl WorkerAction for Task {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
| {
town.state
.register_task_begin(*task.task_type())
.map_err(|e| e.to_string())?;
worker_into_building(town, worker, (task.x() as usize, task.y() as usize))
} | conditional_block |
worker_actions.rs | //! Tasks and task execution of workers
//!
//! Note: This module and its submodules will sooner or later need some refactoring.
//! For now, I still don't really know how I want it to look.
mod worker_abilities;
mod worker_updates;
use crate::db::DB;
use crate::game_master::event::*;
use crate::game_master::town_worker::*;
use crate::town_view::*;
use actix::prelude::*;
use chrono::offset::TimeZone;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use paddlers_shared_lib::api::tasks::*;
use paddlers_shared_lib::game_mechanics::worker::*;
use paddlers_shared_lib::prelude::*;
use worker_abilities::*;
use worker_updates::MutWorkerDBEntity;
trait WorkerAction {
fn x(&self) -> i32;
fn y(&self) -> i32;
fn task_type(&self) -> &TaskType;
fn target(&self) -> Option<HoboKey>;
}
pub struct ValidatedTaskList {
pub new_tasks: Vec<NewTask>,
pub update_tasks: Vec<Task>,
pub village_id: VillageKey,
}
pub(crate) fn validate_task_list(
db: &DB,
tl: &TaskList,
) -> Result<ValidatedTaskList, Box<dyn std::error::Error>> {
let worker_id = tl.worker_id;
// Load relevant data into memory
let mut worker = db.worker_priv(worker_id).ok_or("Worker does not exist")?;
let village_id = VillageKey(worker.home);
let mut town = TownView::load_village(db, village_id);
// check timing and effect of current task interruption
let mut current_task = db
.current_task(worker.key())
.expect("Must have a current task");
let mut timestamp =
interrupt_task(&mut current_task, &worker).ok_or("Cannot interrupt current task.")?;
worker.x = current_task.x;
worker.y = current_task.y;
// iterate tasks and match for task types
let mut tasks = vec![];
for task in tl.tasks.iter() {
// Validate target hobo exists if there is one
if let Some(target_id) = task.target {
db.hobo(HoboKey(target_id)).ok_or("No such hobo id")?;
}
validate_ability(db, task.task_type, worker_id, timestamp)?;
let new_task = NewTask {
worker_id: worker_id.num(),
task_type: task.task_type,
x: task.x as i32,
y: task.y as i32,
start_time: Some(timestamp),
target_hobo_id: task.target,
};
simulate_begin_task(&new_task, &mut town, &mut worker)?;
let duration = simulate_finish_task(&new_task, &mut town, &mut worker)?;
tasks.push(new_task);
timestamp += duration;
}
Ok(ValidatedTaskList {
new_tasks: tasks,
update_tasks: vec![current_task],
village_id,
})
}
pub(crate) fn replace_worker_tasks(
db: &DB,
worker: &Addr<TownWorker>,
worker_id: WorkerKey,
tasks: &[NewTask],
village_id: VillageKey,
) {
db.flush_task_queue(worker_id);
let _inserted = db.insert_tasks(tasks);
let current_task =
execute_worker_tasks(db, worker_id, village_id).expect("Worker has no current task");
if let Some(next_task) = db.earliest_future_task(worker_id) {
let event = Event::WorkerTask {
task_id: current_task.key(),
};
worker
.send(TownWorkerEventMsg(
event,
Utc.from_utc_datetime(&next_task.start_time),
))
.wait()
.expect("Send msg to actor");
}
}
fn interrupt_task(current_task: &mut Task, worker: &Worker) -> Option<NaiveDateTime> {
match current_task.task_type {
TaskType::Idle
| TaskType::ChopTree
| TaskType::Defend
| TaskType::GatherSticks
| TaskType::CollectReward => {
let now = chrono::Utc::now().naive_utc();
Some(now)
}
TaskType::Walk => {
let speed = unit_speed_to_worker_tiles_per_second(worker.speed) as f64;
let time_so_far: Duration = Utc::now().naive_utc() - current_task.start_time;
let steps = (speed * time_so_far.num_microseconds().unwrap() as f64 / 1_000_000.0)
.ceil() as i32;
let total_time = steps as f64 / speed;
let moment = current_task.start_time
+ chrono::Duration::microseconds((total_time * 1_000_000.0) as i64);
let dx = current_task.x - worker.x;
let dy = current_task.y - worker.y;
let x = if dx == 0 {
worker.x
} else if dx < 0 {
worker.x - steps
} else {
worker.x + steps
};
let y = if dy == 0 {
worker.y
} else if dy < 0 {
worker.y - steps
} else {
worker.y + steps
};
// Walking must terminate earlier
current_task.x = x;
current_task.y = y;
Some(moment)
}
TaskType::WelcomeAbility => {
let cast_time = current_task.start_time + AbilityType::Welcome.busy_duration();
Some(cast_time)
}
}
}
/// For the given worker, executes tasks on the DB that are due
fn execute_worker_tasks(db: &DB, worker_id: WorkerKey, village: VillageKey) -> Option<Task> {
let mut tasks = db.past_worker_tasks(worker_id);
let current_task = tasks.pop();
let mut town = TownView::load_village(db, village);
for task in tasks {
if let Err(e) = finish_task(db, task.key(), Some(task), Some(&mut town)) {
println!("Executing task failed: {}", e)
}
}
current_task
}
pub(crate) fn finish_task(
db: &DB,
task_id: TaskKey,
task: Option<Task>,
town: Option<&mut TownView>,
) -> Result<Option<(Event, DateTime<Utc>)>, Box<dyn std::error::Error>> {
let task = task.or_else(|| db.task(task_id));
if let Some(task) = task {
let mut worker = db
.worker_priv(task.worker())
.ok_or("Task references non-existing worker")?;
if let Some(town) = town {
crate::worker_actions::simulate_finish_task(&task, town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
} else {
let mut town = TownView::load_village(db, VillageKey(worker.home));
crate::worker_actions::simulate_finish_task(&task, &mut town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
}
db.update_worker(&worker);
db.update_worker_flag_timestamp_now(worker.key(), WorkerFlagType::Work);
db.delete_task(&task);
Ok(Event::load_next_worker_task(db, task.worker()))
} else {
// Already executed.
Ok(None)
}
}
fn apply_task_to_db(db: &DB, task: &Task, worker: &mut Worker) -> Result<(), String> {
match task.task_type {
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let (attribute, strength) = a.apply();
let ne = NewEffect {
hobo_id: task.target().ok_or("Ability must have a target")?.num(),
attribute,
strength: Some(strength),
start_time: None, // default = now
};
db.insert_effect(&ne);
db.update_ability_used_timestamp(WorkerKey(worker.id), a);
*worker.mana.as_mut().unwrap() -= AbilityType::Welcome.mana_cost();
}
TaskType::CollectReward => {
if let Some(building) = db.find_building_by_coordinates(task.x, task.y, worker.home()) {
match building.building_type.reward_exp() {
Some(exp) => {
worker.add_exp(exp);
db.delete_building(&building);
}
None => {
return Err(format!(
"Tried to collect {} as reward",
building.building_type
));
}
}
} else {
return Err(format!("No reward to collect at {},{}", task.x, task.y));
}
}
_ => { /* NOP */ }
}
Ok(())
}
/// (Try to) apply changes to village state that happen when a worker stops doing a given task.
/// E.g. remove a unit from a building.
/// Returns the time it takes until the task is actually finished.
fn simulate_finish_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<Duration, String> {
match task.task_type() {
TaskType::Idle => Ok(Duration::milliseconds(0)),
TaskType::Walk => Ok(worker_walk(
town,
worker,
(task.x() as usize, task.y() as usize),
)?),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_end(*task.task_type())
.map_err(|e| e.to_string())?;
worker_out_of_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let duration = a.busy_duration();
Ok(duration)
}
TaskType::CollectReward => {
// Lookup object to be collected, then delete it in TownView
// Note: DB update is separate
let index = (task.x() as usize, task.y() as usize);
town.state.remove(&index);
Ok(Duration::milliseconds(0))
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
/// (Try to) apply changes to village state that happen when a worker starts a given task.
/// E.g. add a unit to a building, or pay the required price (only if it is a TownView), ...
fn simulate_begin_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<(), String> {
match task.task_type() {
TaskType::Idle | TaskType::Walk | TaskType::CollectReward => Ok(()),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_begin(*task.task_type())
.map_err(|e| e.to_string())?;
worker_into_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
if let Some(mana) = &mut worker.mana {
let cost = AbilityType::Welcome.mana_cost();
if *mana >= cost {
*mana -= cost;
Ok(())
} else {
Err("Not enough mana".to_owned())
}
} else {
Err("Worker has no mana but tries to use welcome ability".to_owned())
}
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
impl WorkerAction for NewTask {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
impl WorkerAction for Task {
fn x(&self) -> i32 |
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
| {
self.x
} | identifier_body |
worker_actions.rs | //! Tasks and task execution of workers
//!
//! Note: This module and its submodules will sooner or later need some refactoring.
//! For now, I still don't really know how I want it to look.
mod worker_abilities;
mod worker_updates;
use crate::db::DB;
use crate::game_master::event::*;
use crate::game_master::town_worker::*;
use crate::town_view::*;
use actix::prelude::*;
use chrono::offset::TimeZone;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use paddlers_shared_lib::api::tasks::*;
use paddlers_shared_lib::game_mechanics::worker::*;
use paddlers_shared_lib::prelude::*;
use worker_abilities::*;
use worker_updates::MutWorkerDBEntity;
trait WorkerAction {
fn x(&self) -> i32;
fn y(&self) -> i32;
fn task_type(&self) -> &TaskType;
fn target(&self) -> Option<HoboKey>;
}
pub struct ValidatedTaskList {
pub new_tasks: Vec<NewTask>,
pub update_tasks: Vec<Task>,
pub village_id: VillageKey,
}
pub(crate) fn validate_task_list(
db: &DB,
tl: &TaskList,
) -> Result<ValidatedTaskList, Box<dyn std::error::Error>> {
let worker_id = tl.worker_id;
// Load relevant data into memory
let mut worker = db.worker_priv(worker_id).ok_or("Worker does not exist")?;
let village_id = VillageKey(worker.home);
let mut town = TownView::load_village(db, village_id);
// check timing and effect of current task interruption
let mut current_task = db
.current_task(worker.key())
.expect("Must have a current task");
let mut timestamp =
interrupt_task(&mut current_task, &worker).ok_or("Cannot interrupt current task.")?;
worker.x = current_task.x;
worker.y = current_task.y;
// iterate tasks and match for task types | // Validate target hobo exists if there is one
if let Some(target_id) = task.target {
db.hobo(HoboKey(target_id)).ok_or("No such hobo id")?;
}
validate_ability(db, task.task_type, worker_id, timestamp)?;
let new_task = NewTask {
worker_id: worker_id.num(),
task_type: task.task_type,
x: task.x as i32,
y: task.y as i32,
start_time: Some(timestamp),
target_hobo_id: task.target,
};
simulate_begin_task(&new_task, &mut town, &mut worker)?;
let duration = simulate_finish_task(&new_task, &mut town, &mut worker)?;
tasks.push(new_task);
timestamp += duration;
}
Ok(ValidatedTaskList {
new_tasks: tasks,
update_tasks: vec![current_task],
village_id,
})
}
pub(crate) fn replace_worker_tasks(
db: &DB,
worker: &Addr<TownWorker>,
worker_id: WorkerKey,
tasks: &[NewTask],
village_id: VillageKey,
) {
db.flush_task_queue(worker_id);
let _inserted = db.insert_tasks(tasks);
let current_task =
execute_worker_tasks(db, worker_id, village_id).expect("Worker has no current task");
if let Some(next_task) = db.earliest_future_task(worker_id) {
let event = Event::WorkerTask {
task_id: current_task.key(),
};
worker
.send(TownWorkerEventMsg(
event,
Utc.from_utc_datetime(&next_task.start_time),
))
.wait()
.expect("Send msg to actor");
}
}
fn interrupt_task(current_task: &mut Task, worker: &Worker) -> Option<NaiveDateTime> {
match current_task.task_type {
TaskType::Idle
| TaskType::ChopTree
| TaskType::Defend
| TaskType::GatherSticks
| TaskType::CollectReward => {
let now = chrono::Utc::now().naive_utc();
Some(now)
}
TaskType::Walk => {
let speed = unit_speed_to_worker_tiles_per_second(worker.speed) as f64;
let time_so_far: Duration = Utc::now().naive_utc() - current_task.start_time;
let steps = (speed * time_so_far.num_microseconds().unwrap() as f64 / 1_000_000.0)
.ceil() as i32;
let total_time = steps as f64 / speed;
let moment = current_task.start_time
+ chrono::Duration::microseconds((total_time * 1_000_000.0) as i64);
let dx = current_task.x - worker.x;
let dy = current_task.y - worker.y;
let x = if dx == 0 {
worker.x
} else if dx < 0 {
worker.x - steps
} else {
worker.x + steps
};
let y = if dy == 0 {
worker.y
} else if dy < 0 {
worker.y - steps
} else {
worker.y + steps
};
// Walking is interrupted before reaching the goal, so record the position reached so far
current_task.x = x;
current_task.y = y;
Some(moment)
}
TaskType::WelcomeAbility => {
let cast_time = current_task.start_time + AbilityType::Welcome.busy_duration();
Some(cast_time)
}
}
}
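// Illustrative sketch with hypothetical numbers (not from the original source):
// the Walk arm above rounds the elapsed time *up* to the next whole tile, and
// the interruption moment is when that tile is actually reached.
#[allow(dead_code)]
fn walk_interruption_sketch() {
    let speed = 2.0_f64; // tiles per second (assumed value)
    let elapsed_us = 700_000.0_f64; // 0.7 s since the walk started (assumed)
    let steps = (speed * elapsed_us / 1_000_000.0).ceil() as i32; // 1.4 -> 2 tiles
    let total_time = steps as f64 / speed; // 2 tiles at 2 tiles/s = 1.0 s
    assert_eq!(steps, 2);
    assert!((total_time - 1.0).abs() < f64::EPSILON);
}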
/// For the given worker, executes tasks on the DB that are due
fn execute_worker_tasks(db: &DB, worker_id: WorkerKey, village: VillageKey) -> Option<Task> {
let mut tasks = db.past_worker_tasks(worker_id);
let current_task = tasks.pop();
let mut town = TownView::load_village(db, village);
for task in tasks {
if let Err(e) = finish_task(db, task.key(), Some(task), Some(&mut town)) {
println!("Executing task failed: {}", e)
}
}
current_task
}
pub(crate) fn finish_task(
db: &DB,
task_id: TaskKey,
task: Option<Task>,
town: Option<&mut TownView>,
) -> Result<Option<(Event, DateTime<Utc>)>, Box<dyn std::error::Error>> {
let task = task.or_else(|| db.task(task_id));
if let Some(task) = task {
let mut worker = db
.worker_priv(task.worker())
.ok_or("Task references non-existing worker")?;
if let Some(town) = town {
crate::worker_actions::simulate_finish_task(&task, town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
} else {
let mut town = TownView::load_village(db, VillageKey(worker.home));
crate::worker_actions::simulate_finish_task(&task, &mut town, &mut worker)?;
apply_task_to_db(db, &task, &mut worker)?;
}
db.update_worker(&worker);
db.update_worker_flag_timestamp_now(worker.key(), WorkerFlagType::Work);
db.delete_task(&task);
Ok(Event::load_next_worker_task(db, task.worker()))
} else {
// Already executed.
Ok(None)
}
}
fn apply_task_to_db(db: &DB, task: &Task, worker: &mut Worker) -> Result<(), String> {
match task.task_type {
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let (attribute, strength) = a.apply();
let ne = NewEffect {
hobo_id: task.target().ok_or("Ability must have a target")?.num(),
attribute,
strength: Some(strength),
start_time: None, // default = now
};
db.insert_effect(&ne);
db.update_ability_used_timestamp(WorkerKey(worker.id), a);
*worker.mana.as_mut().unwrap() -= AbilityType::Welcome.mana_cost();
}
TaskType::CollectReward => {
if let Some(building) = db.find_building_by_coordinates(task.x, task.y, worker.home()) {
match building.building_type.reward_exp() {
Some(exp) => {
worker.add_exp(exp);
db.delete_building(&building);
}
None => {
return Err(format!(
"Tried to collect {} as reward",
building.building_type
));
}
}
} else {
return Err(format!("No reward to collect at {},{}", task.x, task.y));
}
}
_ => { /* NOP */ }
}
Ok(())
}
/// (Try to) apply changes to village state that happen when a worker stops doing a given task.
/// E.g. remove unit from building.
/// Returns the time it takes until the task is actually finished.
fn simulate_finish_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<Duration, String> {
match task.task_type() {
TaskType::Idle => Ok(Duration::milliseconds(0)),
TaskType::Walk => Ok(worker_walk(
town,
worker,
(task.x() as usize, task.y() as usize),
)?),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_end(*task.task_type())
.map_err(|e| e.to_string())?;
worker_out_of_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
let a = AbilityType::Welcome;
let duration = a.busy_duration();
Ok(duration)
}
TaskType::CollectReward => {
// Lookup object to be collected, then delete it in TownView
// Note: DB update is separate
let index = (task.x() as usize, task.y() as usize);
town.state.remove(&index);
Ok(Duration::milliseconds(0))
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
/// (Try to) apply changes to village state that happen when a worker starts a given task.
/// E.g. add unit to a building, or pay required price (only if it is TownView),...
fn simulate_begin_task<T: WorkerAction>(
task: &T,
town: &mut TownView,
worker: &mut Worker,
) -> Result<(), String> {
match task.task_type() {
TaskType::Idle | TaskType::Walk | TaskType::CollectReward => Ok(()),
TaskType::GatherSticks | TaskType::ChopTree => {
town.state
.register_task_begin(*task.task_type())
.map_err(|e| e.to_string())?;
worker_into_building(town, worker, (task.x() as usize, task.y() as usize))
}
TaskType::WelcomeAbility => {
if let Some(mana) = &mut worker.mana {
let cost = AbilityType::Welcome.mana_cost();
if *mana >= cost {
*mana = *mana - cost;
Ok(())
} else {
Err("Not enough mana".to_owned())
}
} else {
Err("Worker has no mana but tries to use welcome ability".to_owned())
}
}
TaskType::Defend => Err("Task not implemented".to_owned()),
}
}
impl WorkerAction for NewTask {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
}
impl WorkerAction for Task {
fn x(&self) -> i32 {
self.x
}
fn y(&self) -> i32 {
self.y
}
fn task_type(&self) -> &TaskType {
&self.task_type
}
fn target(&self) -> Option<HoboKey> {
self.target_hobo_id.map(HoboKey)
}
} | let mut tasks = vec![];
for task in tl.tasks.iter() { | random_line_split |
lockfree.rs | use crate::{
arenas::atomic_bucket::{AtomicBucket, AtomicBucketList},
Capacity, LassoError, LassoErrorKind, LassoResult, MemoryLimits,
};
use core::{
fmt::{self, Debug},
num::NonZeroUsize,
slice, str,
sync::atomic::{AtomicUsize, Ordering},
};
/// An arena allocator that dynamically grows in size when needed, allocating memory in large chunks
pub(crate) struct LockfreeArena {
/// All the internal buckets, storing all allocated and unallocated items
// TODO: We could keep around a second list of buckets to store filled buckets
// in to keep us from having to iterate over them, need more tests to
// see what the impact of that is
buckets: AtomicBucketList,
/// The default capacity of each bucket
///
/// Invariant: `bucket_capacity` must never be zero
bucket_capacity: AtomicUsize,
memory_usage: AtomicUsize,
max_memory_usage: AtomicUsize,
}
impl LockfreeArena {
/// Create a new Arena with the default bucket size of 4096 bytes
pub fn new(capacity: NonZeroUsize, max_memory_usage: usize) -> LassoResult<Self> {
Ok(Self {
// Allocate one bucket
buckets: AtomicBucketList::new(capacity)?,
bucket_capacity: AtomicUsize::new(capacity.get()),
// The current capacity is whatever size the bucket we just allocated is
memory_usage: AtomicUsize::new(capacity.get()),
max_memory_usage: AtomicUsize::new(max_memory_usage),
})
}
#[inline]
pub(crate) fn current_memory_usage(&self) -> usize {
self.memory_usage.load(Ordering::Relaxed)
}
#[inline]
pub(crate) fn set_max_memory_usage(&self, max_memory_usage: usize) {
self.max_memory_usage
.store(max_memory_usage, Ordering::Relaxed);
}
#[inline]
pub(crate) fn get_max_memory_usage(&self) -> usize {
self.max_memory_usage.load(Ordering::Relaxed)
}
fn set_bucket_capacity(&self, capacity: usize) {
debug_assert_ne!(capacity, 0);
self.bucket_capacity.store(capacity, Ordering::Relaxed);
}
/// Doesn't actually allocate anything, but increments `self.memory_usage` and returns an
/// error if the attempted amount surpasses `max_memory_usage`
fn | (&self, requested_mem: usize) -> LassoResult<()> {
if self.memory_usage.load(Ordering::Relaxed) + requested_mem
> self.max_memory_usage.load(Ordering::Relaxed)
{
Err(LassoError::new(LassoErrorKind::MemoryLimitReached))
} else {
self.memory_usage
.fetch_add(requested_mem, Ordering::Relaxed);
Ok(())
}
}
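// Worked numbers for the check above (hypothetical, not from the original
// source): with usage 9 and limit 10, a 2-byte request is rejected while a
// 1-byte request still fits.
#[allow(dead_code)]
fn limit_check_sketch() {
    let (usage, limit) = (9usize, 10usize);
    assert!(usage + 2 > limit); // rejected: would exceed the limit
    assert!(usage + 1 <= limit); // accepted: still within the limit
}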
/// Store a slice in the Arena, returning an error if memory is exhausted
///
/// # Safety
///
/// The reference passed back must be dropped before the arena that created it is
///
pub unsafe fn store_str(&self, string: &str) -> LassoResult<&'static str> {
// If the string is empty, simply return an empty string.
// This ensures that only strings with lengths greater
// than zero will be allocated within the arena
if string.is_empty() {
return Ok("");
}
let slice = string.as_bytes();
debug_assert_ne!(slice.len(), 0);
// Iterate over all of the buckets within the list while attempting to find one
// that has enough space to fit our string within it
//
// This is a tradeoff between allocation speed and memory usage. As-is we prioritize
// allocation speed in exchange for potentially missing possible reuse situations
// and then allocating more memory than is strictly necessary. In practice this shouldn't
// really matter, but it's worth noting that the opposite tradeoff can be made by adding bounded
// retries within this loop, the worst-case performance suffers in exchange for potentially
// better memory usage.
for bucket in self.buckets.iter() {
if let Ok(start) = bucket.try_inc_length(slice.len()) {
// Safety: We now have exclusive access to `bucket[start..start + slice.len()]`
let allocated = unsafe { bucket.slice_mut(start) };
// Copy the given slice into the allocation
unsafe { allocated.copy_from_nonoverlapping(slice.as_ptr(), slice.len()) };
// Return the successfully allocated string
let string = unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(allocated, slice.len()))
};
return Ok(string);
}
// Otherwise the bucket doesn't have sufficient capacity for the string
// so we carry on searching through allocated buckets
}
// If we couldn't find a pre-existing bucket with enough room in it, allocate our own bucket
let next_capacity = self.bucket_capacity.load(Ordering::Relaxed) * 2;
debug_assert_ne!(next_capacity, 0);
// If the current string's length is greater than the doubled current capacity, allocate a bucket exactly the
// size of the large string and push it into the bucket list. This ensures that obscenely large strings will
// not permanently affect the resource consumption of the interner
if slice.len() > next_capacity {
// Check that we haven't exhausted our memory limit
self.allocate_memory(slice.len())?;
// Safety: `len` will never be zero since we explicitly handled zero-length strings
// at the beginning of the function
let non_zero_len = unsafe { NonZeroUsize::new_unchecked(slice.len()) };
debug_assert_ne!(slice.len(), 0);
let mut bucket = AtomicBucket::with_capacity(non_zero_len)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
} else {
let memory_usage = self.current_memory_usage();
let max_memory_usage = self.get_max_memory_usage();
// If trying to use the doubled capacity will surpass our memory limit, just allocate as much as we can
if memory_usage + next_capacity > max_memory_usage {
let remaining_memory = max_memory_usage.saturating_sub(memory_usage);
// Check that we haven't exhausted our memory limit
self.allocate_memory(remaining_memory)?;
// Allocate a bucket with exactly the remaining memory
let mut bucket = AtomicBucket::with_capacity(
NonZeroUsize::new(remaining_memory)
.ok_or_else(|| LassoError::new(LassoErrorKind::MemoryLimitReached))?,
)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
// TODO: Push the bucket to the back or something so that we can get it somewhat out
// of the search path, reduce the `n` in the `O(n)` list traversal
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
// Otherwise just allocate a normal doubled bucket
} else {
// Check that we haven't exhausted our memory limit
self.allocate_memory(next_capacity)?;
// Set the capacity to twice what it currently is to allow for fewer allocations as more strings are interned
self.set_bucket_capacity(next_capacity);
// Safety: `next_capacity` will never be zero
let capacity = unsafe { NonZeroUsize::new_unchecked(next_capacity) };
debug_assert_ne!(next_capacity, 0);
let mut bucket = AtomicBucket::with_capacity(capacity)?;
// Safety: The new bucket will have enough room for the string
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
}
}
}
}
impl Default for LockfreeArena {
fn default() -> Self {
Self::new(
Capacity::default().bytes,
MemoryLimits::default().max_memory_usage,
)
.expect("failed to create default arena")
}
}
impl Debug for LockfreeArena {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct TotalBuckets(usize);
impl Debug for TotalBuckets {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.0 == 1 {
f.write_str("...1 bucket")
} else {
write!(f, "...{} buckets", self.0)
}
}
}
f.debug_struct("Arena")
.field("buckets", &TotalBuckets(self.buckets.len()))
.field(
"bucket_capacity",
&self.bucket_capacity.load(Ordering::Relaxed),
)
.field("memory_usage", &self.memory_usage.load(Ordering::Relaxed))
.field(
"max_memory_usage",
&self.max_memory_usage.load(Ordering::Relaxed),
)
.finish()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn string() {
let arena = LockfreeArena::default();
unsafe {
let idx = arena.store_str("test");
assert_eq!(idx, Ok("test"));
}
}
#[test]
fn empty_str() {
let arena = LockfreeArena::default();
unsafe {
let zst = arena.store_str("");
let zst1 = arena.store_str("");
let zst2 = arena.store_str("");
assert_eq!(zst, Ok(""));
assert_eq!(zst1, Ok(""));
assert_eq!(zst2, Ok(""));
}
}
#[test]
fn exponential_allocations() {
let arena = LockfreeArena::default();
let mut len = 4096;
for _ in 0..10 {
let large_string = "a".repeat(len);
let arena_string = unsafe { arena.store_str(&large_string) };
assert_eq!(arena_string, Ok(large_string.as_str()));
len *= 2;
}
}
#[test]
fn memory_exhausted() {
let arena = LockfreeArena::new(NonZeroUsize::new(10).unwrap(), 10).unwrap();
unsafe {
assert!(arena.store_str("0123456789").is_ok());
// ZSTs take up zero bytes
arena.store_str("").unwrap();
let err = arena.store_str("a").unwrap_err();
assert!(err.kind().is_memory_limit());
let err = arena.store_str("dfgsagdfgsdf").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_too_much() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 10).unwrap();
unsafe {
let err = arena.store_str("abcdefghijklmnopqrstuvwxyz").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_more_than_double() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 1000).unwrap();
unsafe {
assert!(arena.store_str("abcdefghijklmnopqrstuvwxyz").is_ok());
}
}
}
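// Illustrative sketch of the growth policy in `store_str`, with hypothetical
// numbers (not part of the original source): bucket capacity 4096, current
// usage 6000, memory limit 8000.
#[allow(dead_code)]
fn growth_policy_sketch() {
    let (bucket_capacity, memory_usage, max_memory_usage) = (4096usize, 6000usize, 8000usize);
    let slice_len = 100usize;
    let next_capacity = bucket_capacity * 2; // 8192
    if slice_len > next_capacity {
        // oversized string: a bucket sized exactly to the string
    } else if memory_usage + next_capacity > max_memory_usage {
        // doubling would exceed the limit: clamp to whatever memory remains
        assert_eq!(max_memory_usage.saturating_sub(memory_usage), 2000);
    } else {
        // normal case: allocate a doubled bucket
        unreachable!("6000 + 8192 > 8000, so the clamped branch is taken");
    }
}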
| allocate_memory | identifier_name |
lockfree.rs | use crate::{
arenas::atomic_bucket::{AtomicBucket, AtomicBucketList},
Capacity, LassoError, LassoErrorKind, LassoResult, MemoryLimits,
};
use core::{
fmt::{self, Debug},
num::NonZeroUsize,
slice, str,
sync::atomic::{AtomicUsize, Ordering},
};
/// An arena allocator that dynamically grows in size when needed, allocating memory in large chunks
pub(crate) struct LockfreeArena {
/// All the internal buckets, storing all allocated and unallocated items
// TODO: We could keep around a second list of buckets to store filled buckets
// in to keep us from having to iterate over them, need more tests to
// see what the impact of that is
buckets: AtomicBucketList,
/// The default capacity of each bucket
///
/// Invariant: `bucket_capacity` must never be zero
bucket_capacity: AtomicUsize,
memory_usage: AtomicUsize,
max_memory_usage: AtomicUsize,
}
impl LockfreeArena {
/// Create a new Arena with the default bucket size of 4096 bytes
pub fn new(capacity: NonZeroUsize, max_memory_usage: usize) -> LassoResult<Self> {
Ok(Self {
// Allocate one bucket
buckets: AtomicBucketList::new(capacity)?,
bucket_capacity: AtomicUsize::new(capacity.get()),
// The current capacity is whatever size the bucket we just allocated is
memory_usage: AtomicUsize::new(capacity.get()),
max_memory_usage: AtomicUsize::new(max_memory_usage),
})
}
#[inline]
pub(crate) fn current_memory_usage(&self) -> usize {
self.memory_usage.load(Ordering::Relaxed)
}
#[inline]
pub(crate) fn set_max_memory_usage(&self, max_memory_usage: usize) {
self.max_memory_usage
.store(max_memory_usage, Ordering::Relaxed);
}
#[inline]
pub(crate) fn get_max_memory_usage(&self) -> usize {
self.max_memory_usage.load(Ordering::Relaxed)
}
fn set_bucket_capacity(&self, capacity: usize) {
debug_assert_ne!(capacity, 0);
self.bucket_capacity.store(capacity, Ordering::Relaxed);
}
/// Doesn't actually allocate anything, but increments `self.memory_usage` and returns an
/// error if the attempted amount surpasses `max_memory_usage`
fn allocate_memory(&self, requested_mem: usize) -> LassoResult<()> {
if self.memory_usage.load(Ordering::Relaxed) + requested_mem
> self.max_memory_usage.load(Ordering::Relaxed)
{
Err(LassoError::new(LassoErrorKind::MemoryLimitReached))
} else {
self.memory_usage
.fetch_add(requested_mem, Ordering::Relaxed);
Ok(())
}
}
/// Store a slice in the Arena, returning an error if memory is exhausted
///
/// # Safety
///
/// The reference passed back must be dropped before the arena that created it is
///
pub unsafe fn store_str(&self, string: &str) -> LassoResult<&'static str> {
// If the string is empty, simply return an empty string.
// This ensures that only strings with lengths greater
// than zero will be allocated within the arena
if string.is_empty() {
return Ok("");
}
let slice = string.as_bytes();
debug_assert_ne!(slice.len(), 0);
// Iterate over all of the buckets within the list while attempting to find one
// that has enough space to fit our string within it
//
// This is a tradeoff between allocation speed and memory usage. As-is we prioritize
// allocation speed in exchange for potentially missing possible reuse situations
// and then allocating more memory than is strictly necessary. In practice this shouldn't
// really matter, but it's worth noting that the opposite tradeoff can be made by adding bounded
// retries within this loop, the worst-case performance suffers in exchange for potentially
// better memory usage.
for bucket in self.buckets.iter() {
if let Ok(start) = bucket.try_inc_length(slice.len()) {
// Safety: We now have exclusive access to `bucket[start..start + slice.len()]`
let allocated = unsafe { bucket.slice_mut(start) };
// Copy the given slice into the allocation
unsafe { allocated.copy_from_nonoverlapping(slice.as_ptr(), slice.len()) };
// Return the successfully allocated string
let string = unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(allocated, slice.len()))
};
return Ok(string);
}
// Otherwise the bucket doesn't have sufficient capacity for the string
// so we carry on searching through allocated buckets
}
// If we couldn't find a pre-existing bucket with enough room in it, allocate our own bucket
let next_capacity = self.bucket_capacity.load(Ordering::Relaxed) * 2;
debug_assert_ne!(next_capacity, 0);
// If the current string's length is greater than the doubled current capacity, allocate a bucket exactly the
// size of the large string and push it into the bucket list. This ensures that obscenely large strings will
// not permanently affect the resource consumption of the interner
if slice.len() > next_capacity {
// Check that we haven't exhausted our memory limit
self.allocate_memory(slice.len())?;
// Safety: `len` will never be zero since we explicitly handled zero-length strings
// at the beginning of the function
let non_zero_len = unsafe { NonZeroUsize::new_unchecked(slice.len()) };
debug_assert_ne!(slice.len(), 0);
let mut bucket = AtomicBucket::with_capacity(non_zero_len)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
} else | // TODO: Push the bucket to the back or something so that we can get it somewhat out
// of the search path, reduce the `n` in the `O(n)` list traversal
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
// Otherwise just allocate a normal doubled bucket
} else {
// Check that we haven't exhausted our memory limit
self.allocate_memory(next_capacity)?;
// Set the capacity to twice what it currently is to allow for fewer allocations as more strings are interned
self.set_bucket_capacity(next_capacity);
// Safety: `next_capacity` will never be zero
let capacity = unsafe { NonZeroUsize::new_unchecked(next_capacity) };
debug_assert_ne!(next_capacity, 0);
let mut bucket = AtomicBucket::with_capacity(capacity)?;
// Safety: The new bucket will have enough room for the string
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
}
}
}
}
impl Default for LockfreeArena {
fn default() -> Self {
Self::new(
Capacity::default().bytes,
MemoryLimits::default().max_memory_usage,
)
.expect("failed to create default arena")
}
}
impl Debug for LockfreeArena {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct TotalBuckets(usize);
impl Debug for TotalBuckets {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.0 == 1 {
f.write_str("...1 bucket")
} else {
write!(f, "...{} buckets", self.0)
}
}
}
f.debug_struct("Arena")
.field("buckets", &TotalBuckets(self.buckets.len()))
.field(
"bucket_capacity",
&self.bucket_capacity.load(Ordering::Relaxed),
)
.field("memory_usage", &self.memory_usage.load(Ordering::Relaxed))
.field(
"max_memory_usage",
&self.max_memory_usage.load(Ordering::Relaxed),
)
.finish()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn string() {
let arena = LockfreeArena::default();
unsafe {
let idx = arena.store_str("test");
assert_eq!(idx, Ok("test"));
}
}
#[test]
fn empty_str() {
let arena = LockfreeArena::default();
unsafe {
let zst = arena.store_str("");
let zst1 = arena.store_str("");
let zst2 = arena.store_str("");
assert_eq!(zst, Ok(""));
assert_eq!(zst1, Ok(""));
assert_eq!(zst2, Ok(""));
}
}
#[test]
fn exponential_allocations() {
let arena = LockfreeArena::default();
let mut len = 4096;
for _ in 0..10 {
let large_string = "a".repeat(len);
let arena_string = unsafe { arena.store_str(&large_string) };
assert_eq!(arena_string, Ok(large_string.as_str()));
len *= 2;
}
}
#[test]
fn memory_exhausted() {
let arena = LockfreeArena::new(NonZeroUsize::new(10).unwrap(), 10).unwrap();
unsafe {
assert!(arena.store_str("0123456789").is_ok());
// ZSTs take up zero bytes
arena.store_str("").unwrap();
let err = arena.store_str("a").unwrap_err();
assert!(err.kind().is_memory_limit());
let err = arena.store_str("dfgsagdfgsdf").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_too_much() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 10).unwrap();
unsafe {
let err = arena.store_str("abcdefghijklmnopqrstuvwxyz").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_more_than_double() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 1000).unwrap();
unsafe {
assert!(arena.store_str("abcdefghijklmnopqrstuvwxyz").is_ok());
}
}
}
| {
let memory_usage = self.current_memory_usage();
let max_memory_usage = self.get_max_memory_usage();
// If trying to use the doubled capacity will surpass our memory limit, just allocate as much as we can
if memory_usage + next_capacity > max_memory_usage {
let remaining_memory = max_memory_usage.saturating_sub(memory_usage);
// Check that we haven't exhausted our memory limit
self.allocate_memory(remaining_memory)?;
// Allocate a bucket with exactly the remaining memory
let mut bucket = AtomicBucket::with_capacity(
NonZeroUsize::new(remaining_memory)
.ok_or_else(|| LassoError::new(LassoErrorKind::MemoryLimitReached))?,
)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) }; | conditional_block |
lockfree.rs | use crate::{
arenas::atomic_bucket::{AtomicBucket, AtomicBucketList},
Capacity, LassoError, LassoErrorKind, LassoResult, MemoryLimits,
};
use core::{
fmt::{self, Debug},
num::NonZeroUsize,
slice, str,
sync::atomic::{AtomicUsize, Ordering},
};
/// An arena allocator that dynamically grows in size when needed, allocating memory in large chunks
pub(crate) struct LockfreeArena {
/// All the internal buckets, storing all allocated and unallocated items
// TODO: We could keep around a second list of buckets to store filled buckets
// in to keep us from having to iterate over them, need more tests to
// see what the impact of that is
buckets: AtomicBucketList,
/// The default capacity of each bucket
///
/// Invariant: `bucket_capacity` must never be zero | memory_usage: AtomicUsize,
max_memory_usage: AtomicUsize,
}
impl LockfreeArena {
/// Create a new Arena with the default bucket size of 4096 bytes
pub fn new(capacity: NonZeroUsize, max_memory_usage: usize) -> LassoResult<Self> {
Ok(Self {
// Allocate one bucket
buckets: AtomicBucketList::new(capacity)?,
bucket_capacity: AtomicUsize::new(capacity.get()),
// The current capacity is whatever size the bucket we just allocated is
memory_usage: AtomicUsize::new(capacity.get()),
max_memory_usage: AtomicUsize::new(max_memory_usage),
})
}
#[inline]
pub(crate) fn current_memory_usage(&self) -> usize {
self.memory_usage.load(Ordering::Relaxed)
}
#[inline]
pub(crate) fn set_max_memory_usage(&self, max_memory_usage: usize) {
self.max_memory_usage
.store(max_memory_usage, Ordering::Relaxed);
}
#[inline]
pub(crate) fn get_max_memory_usage(&self) -> usize {
self.max_memory_usage.load(Ordering::Relaxed)
}
fn set_bucket_capacity(&self, capacity: usize) {
debug_assert_ne!(capacity, 0);
self.bucket_capacity.store(capacity, Ordering::Relaxed);
}
/// Doesn't actually allocate anything, but increments `self.memory_usage` and returns an
/// error if the attempted amount surpasses `max_memory_usage`
fn allocate_memory(&self, requested_mem: usize) -> LassoResult<()> {
if self.memory_usage.load(Ordering::Relaxed) + requested_mem
> self.max_memory_usage.load(Ordering::Relaxed)
{
Err(LassoError::new(LassoErrorKind::MemoryLimitReached))
} else {
self.memory_usage
.fetch_add(requested_mem, Ordering::Relaxed);
Ok(())
}
}
/// Store a slice in the Arena, returning an error if memory is exhausted
///
/// # Safety
///
/// The reference passed back must be dropped before the arena that created it is
///
pub unsafe fn store_str(&self, string: &str) -> LassoResult<&'static str> {
// If the string is empty, simply return an empty string.
// This ensures that only strings with lengths greater
// than zero will be allocated within the arena
if string.is_empty() {
return Ok("");
}
let slice = string.as_bytes();
debug_assert_ne!(slice.len(), 0);
// Iterate over all of the buckets within the list while attempting to find one
// that has enough space to fit our string within it
//
// This is a tradeoff between allocation speed and memory usage. As-is we prioritize
// allocation speed in exchange for potentially missing possible reuse situations
// and then allocating more memory than is strictly necessary. In practice this shouldn't
// really matter, but it's worth noting that the opposite tradeoff can be made by adding bounded
// retries within this loop, the worst-case performance suffers in exchange for potentially
// better memory usage.
for bucket in self.buckets.iter() {
if let Ok(start) = bucket.try_inc_length(slice.len()) {
// Safety: We now have exclusive access to `bucket[start..start + slice.len()]`
let allocated = unsafe { bucket.slice_mut(start) };
// Copy the given slice into the allocation
unsafe { allocated.copy_from_nonoverlapping(slice.as_ptr(), slice.len()) };
// Return the successfully allocated string
let string = unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(allocated, slice.len()))
};
return Ok(string);
}
// Otherwise the bucket doesn't have sufficient capacity for the string
// so we carry on searching through allocated buckets
}
// If we couldn't find a pre-existing bucket with enough room in it, allocate our own bucket
let next_capacity = self.bucket_capacity.load(Ordering::Relaxed) * 2;
debug_assert_ne!(next_capacity, 0);
// If the current string's length is greater than the doubled current capacity, allocate a bucket exactly the
// size of the large string and push it into the bucket list. This ensures that obscenely large strings will
// not permanently affect the resource consumption of the interner
if slice.len() > next_capacity {
// Check that we haven't exhausted our memory limit
self.allocate_memory(slice.len())?;
// Safety: `len` will never be zero since we explicitly handled zero-length strings
// at the beginning of the function
let non_zero_len = unsafe { NonZeroUsize::new_unchecked(slice.len()) };
debug_assert_ne!(slice.len(), 0);
let mut bucket = AtomicBucket::with_capacity(non_zero_len)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
} else {
let memory_usage = self.current_memory_usage();
let max_memory_usage = self.get_max_memory_usage();
// If trying to use the doubled capacity will surpass our memory limit, just allocate as much as we can
if memory_usage + next_capacity > max_memory_usage {
let remaining_memory = max_memory_usage.saturating_sub(memory_usage);
// Check that we haven't exhausted our memory limit
self.allocate_memory(remaining_memory)?;
// Allocate a bucket with exactly the remaining memory
let mut bucket = AtomicBucket::with_capacity(
NonZeroUsize::new(remaining_memory)
.ok_or_else(|| LassoError::new(LassoErrorKind::MemoryLimitReached))?,
)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
// TODO: Push the bucket to the back or something so that we can get it somewhat out
// of the search path, reduce the `n` in the `O(n)` list traversal
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
// Otherwise just allocate a normal doubled bucket
} else {
// Check that we haven't exhausted our memory limit
self.allocate_memory(next_capacity)?;
// Set the capacity to twice what it currently is to allow for fewer allocations as more strings are interned
self.set_bucket_capacity(next_capacity);
// Safety: `next_capacity` will never be zero
let capacity = unsafe { NonZeroUsize::new_unchecked(next_capacity) };
debug_assert_ne!(next_capacity, 0);
let mut bucket = AtomicBucket::with_capacity(capacity)?;
// Safety: The new bucket will have enough room for the string
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
}
}
}
}
impl Default for LockfreeArena {
fn default() -> Self {
Self::new(
Capacity::default().bytes,
MemoryLimits::default().max_memory_usage,
)
.expect("failed to create default arena")
}
}
impl Debug for LockfreeArena {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct TotalBuckets(usize);
impl Debug for TotalBuckets {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.0 == 1 {
f.write_str("...1 bucket")
} else {
write!(f, "...{} buckets", self.0)
}
}
}
f.debug_struct("Arena")
.field("buckets", &TotalBuckets(self.buckets.len()))
.field(
"bucket_capacity",
&self.bucket_capacity.load(Ordering::Relaxed),
)
.field("memory_usage", &self.memory_usage.load(Ordering::Relaxed))
.field(
"max_memory_usage",
&self.max_memory_usage.load(Ordering::Relaxed),
)
.finish()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn string() {
let arena = LockfreeArena::default();
unsafe {
let idx = arena.store_str("test");
assert_eq!(idx, Ok("test"));
}
}
#[test]
fn empty_str() {
let arena = LockfreeArena::default();
unsafe {
let zst = arena.store_str("");
let zst1 = arena.store_str("");
let zst2 = arena.store_str("");
assert_eq!(zst, Ok(""));
assert_eq!(zst1, Ok(""));
assert_eq!(zst2, Ok(""));
}
}
#[test]
fn exponential_allocations() {
let arena = LockfreeArena::default();
let mut len = 4096;
for _ in 0..10 {
let large_string = "a".repeat(len);
let arena_string = unsafe { arena.store_str(&large_string) };
assert_eq!(arena_string, Ok(large_string.as_str()));
len *= 2;
}
}
#[test]
fn memory_exhausted() {
let arena = LockfreeArena::new(NonZeroUsize::new(10).unwrap(), 10).unwrap();
unsafe {
assert!(arena.store_str("0123456789").is_ok());
// ZSTs take up zero bytes
arena.store_str("").unwrap();
let err = arena.store_str("a").unwrap_err();
assert!(err.kind().is_memory_limit());
let err = arena.store_str("dfgsagdfgsdf").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_too_much() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 10).unwrap();
unsafe {
let err = arena.store_str("abcdefghijklmnopqrstuvwxyz").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_more_than_double() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 1000).unwrap();
unsafe {
assert!(arena.store_str("abcdefghijklmnopqrstuvwxyz").is_ok());
}
}
} | bucket_capacity: AtomicUsize, | random_line_split |
lockfree.rs | use crate::{
arenas::atomic_bucket::{AtomicBucket, AtomicBucketList},
Capacity, LassoError, LassoErrorKind, LassoResult, MemoryLimits,
};
use core::{
fmt::{self, Debug},
num::NonZeroUsize,
slice, str,
sync::atomic::{AtomicUsize, Ordering},
};
/// An arena allocator that dynamically grows in size when needed, allocating memory in large chunks
pub(crate) struct LockfreeArena {
/// All the internal buckets, storing all allocated and unallocated items
// TODO: We could keep around a second list of buckets to store filled buckets
// in to keep us from having to iterate over them, need more tests to
// see what the impact of that is
buckets: AtomicBucketList,
/// The default capacity of each bucket
///
/// Invariant: `bucket_capacity` must never be zero
bucket_capacity: AtomicUsize,
memory_usage: AtomicUsize,
max_memory_usage: AtomicUsize,
}
impl LockfreeArena {
/// Create a new Arena with the default bucket size of 4096 bytes
pub fn new(capacity: NonZeroUsize, max_memory_usage: usize) -> LassoResult<Self> {
Ok(Self {
// Allocate one bucket
buckets: AtomicBucketList::new(capacity)?,
bucket_capacity: AtomicUsize::new(capacity.get()),
// The current capacity is whatever size the bucket we just allocated is
memory_usage: AtomicUsize::new(capacity.get()),
max_memory_usage: AtomicUsize::new(max_memory_usage),
})
}
#[inline]
pub(crate) fn current_memory_usage(&self) -> usize {
self.memory_usage.load(Ordering::Relaxed)
}
#[inline]
pub(crate) fn set_max_memory_usage(&self, max_memory_usage: usize) {
self.max_memory_usage
.store(max_memory_usage, Ordering::Relaxed);
}
#[inline]
pub(crate) fn get_max_memory_usage(&self) -> usize {
self.max_memory_usage.load(Ordering::Relaxed)
}
fn set_bucket_capacity(&self, capacity: usize) {
debug_assert_ne!(capacity, 0);
self.bucket_capacity.store(capacity, Ordering::Relaxed);
}
/// Doesn't actually allocate anything, but increments `self.memory_usage` and returns an
/// error if the attempted amount surpasses `max_memory_usage`
fn allocate_memory(&self, requested_mem: usize) -> LassoResult<()> {
if self.memory_usage.load(Ordering::Relaxed) + requested_mem
> self.max_memory_usage.load(Ordering::Relaxed)
{
Err(LassoError::new(LassoErrorKind::MemoryLimitReached))
} else {
self.memory_usage
.fetch_add(requested_mem, Ordering::Relaxed);
Ok(())
}
}
/// Store a slice in the Arena, returning an error if memory is exhausted
///
/// # Safety
///
/// The reference passed back must be dropped before the arena that created it is
///
pub unsafe fn store_str(&self, string: &str) -> LassoResult<&'static str> {
// If the string is empty, simply return an empty string.
// This ensures that only strings with lengths greater
// than zero will be allocated within the arena
if string.is_empty() {
return Ok("");
}
let slice = string.as_bytes();
debug_assert_ne!(slice.len(), 0);
// Iterate over all of the buckets within the list while attempting to find one
// that has enough space to fit our string within it
//
// This is a tradeoff between allocation speed and memory usage. As-is we prioritize
// allocation speed in exchange for potentially missing possible reuse situations
// and then allocating more memory than is strictly necessary. In practice this shouldn't
// really matter, but it's worth noting that the opposite tradeoff can be made by adding bounded
// retries within this loop, the worst-case performance suffers in exchange for potentially
// better memory usage.
for bucket in self.buckets.iter() {
if let Ok(start) = bucket.try_inc_length(slice.len()) {
// Safety: We now have exclusive access to `bucket[start..start + slice.len()]`
let allocated = unsafe { bucket.slice_mut(start) };
// Copy the given slice into the allocation
unsafe { allocated.copy_from_nonoverlapping(slice.as_ptr(), slice.len()) };
// Return the successfully allocated string
let string = unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(allocated, slice.len()))
};
return Ok(string);
}
// Otherwise the bucket doesn't have sufficient capacity for the string
// so we carry on searching through allocated buckets
}
// If we couldn't find a pre-existing bucket with enough room in it, allocate our own bucket
let next_capacity = self.bucket_capacity.load(Ordering::Relaxed) * 2;
debug_assert_ne!(next_capacity, 0);
// If the current string's length is greater than the doubled current capacity, allocate a bucket exactly the
// size of the large string and push it into the bucket list. This ensures that obscenely large strings will
// not permanently affect the resource consumption of the interner
if slice.len() > next_capacity {
// Check that we haven't exhausted our memory limit
self.allocate_memory(slice.len())?;
// Safety: `len` will never be zero since we explicitly handled zero-length strings
// at the beginning of the function
let non_zero_len = unsafe { NonZeroUsize::new_unchecked(slice.len()) };
debug_assert_ne!(slice.len(), 0);
let mut bucket = AtomicBucket::with_capacity(non_zero_len)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
} else {
let memory_usage = self.current_memory_usage();
let max_memory_usage = self.get_max_memory_usage();
// If trying to use the doubled capacity will surpass our memory limit, just allocate as much as we can
if memory_usage + next_capacity > max_memory_usage {
let remaining_memory = max_memory_usage.saturating_sub(memory_usage);
// Check that we haven't exhausted our memory limit
self.allocate_memory(remaining_memory)?;
// Allocate a bucket with exactly the remaining memory
let mut bucket = AtomicBucket::with_capacity(
NonZeroUsize::new(remaining_memory)
.ok_or_else(|| LassoError::new(LassoErrorKind::MemoryLimitReached))?,
)?;
// Safety: The new bucket will have exactly enough room for the string and we have
// exclusive access to the bucket since we just created it
let allocated_string = unsafe { bucket.push_slice(slice) };
// TODO: Push the bucket to the back or something so that we can get it somewhat out
// of the search path, reduce the `n` in the `O(n)` list traversal
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
// Otherwise just allocate a normal doubled bucket
} else {
// Check that we haven't exhausted our memory limit
self.allocate_memory(next_capacity)?;
// Set the capacity to twice what it currently is to allow for fewer allocations as more strings are interned
self.set_bucket_capacity(next_capacity);
// Safety: `next_capacity` will never be zero
let capacity = unsafe { NonZeroUsize::new_unchecked(next_capacity) };
debug_assert_ne!(next_capacity, 0);
let mut bucket = AtomicBucket::with_capacity(capacity)?;
// Safety: The new bucket will have enough room for the string
let allocated_string = unsafe { bucket.push_slice(slice) };
self.buckets.push_front(bucket.into_ref());
Ok(allocated_string)
}
}
}
}
impl Default for LockfreeArena {
fn default() -> Self {
Self::new(
Capacity::default().bytes,
MemoryLimits::default().max_memory_usage,
)
.expect("failed to create default arena")
}
}
impl Debug for LockfreeArena {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
struct TotalBuckets(usize);
impl Debug for TotalBuckets {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.0 == 1 {
f.write_str("...1 bucket")
} else {
write!(f, "...{} buckets", self.0)
}
}
}
f.debug_struct("Arena")
.field("buckets", &TotalBuckets(self.buckets.len()))
.field(
"bucket_capacity",
&self.bucket_capacity.load(Ordering::Relaxed),
)
.field("memory_usage", &self.memory_usage.load(Ordering::Relaxed))
.field(
"max_memory_usage",
&self.max_memory_usage.load(Ordering::Relaxed),
)
.finish()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn string() {
let arena = LockfreeArena::default();
unsafe {
let idx = arena.store_str("test");
assert_eq!(idx, Ok("test"));
}
}
#[test]
fn empty_str() {
let arena = LockfreeArena::default();
unsafe {
let zst = arena.store_str("");
let zst1 = arena.store_str("");
let zst2 = arena.store_str("");
assert_eq!(zst, Ok(""));
assert_eq!(zst1, Ok(""));
assert_eq!(zst2, Ok(""));
}
}
#[test]
fn exponential_allocations() {
let arena = LockfreeArena::default();
let mut len = 4096;
for _ in 0..10 {
let large_string = "a".repeat(len);
let arena_string = unsafe { arena.store_str(&large_string) };
assert_eq!(arena_string, Ok(large_string.as_str()));
len *= 2;
}
}
#[test]
fn memory_exhausted() {
let arena = LockfreeArena::new(NonZeroUsize::new(10).unwrap(), 10).unwrap();
unsafe {
assert!(arena.store_str("0123456789").is_ok());
// ZSTs take up zero bytes
arena.store_str("").unwrap();
let err = arena.store_str("a").unwrap_err();
assert!(err.kind().is_memory_limit());
let err = arena.store_str("dfgsagdfgsdf").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_too_much() {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 10).unwrap();
unsafe {
let err = arena.store_str("abcdefghijklmnopqrstuvwxyz").unwrap_err();
assert!(err.kind().is_memory_limit());
}
}
#[test]
fn allocate_more_than_double() |
}
| {
let arena = LockfreeArena::new(NonZeroUsize::new(1).unwrap(), 1000).unwrap();
unsafe {
assert!(arena.store_str("abcdefghijklmnopqrstuvwxyz").is_ok());
}
} | identifier_body |
config.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::net::SocketAddr;
use std::ops::Range;
use std::time::Duration;
use clap::{app_from_crate, crate_authors, crate_description, crate_name, crate_version, value_t, Arg, SubCommand};
use toml;
use serde_derive::{Deserialize, Serialize};
use raft_tokio::RaftOptions;
use crate::aggregate::AggregationMode;
use crate::management::{ConsensusAction, LeaderAction, MgmtCommand};
use crate::{ConsensusKind, ConsensusState};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct System {
/// Logging level
pub verbosity: String,
/// Network settings
pub network: Network,
/// Internal Raft settings
pub raft: Raft,
/// Consul settings
pub consul: Consul,
/// Metric settings
pub metrics: Metrics,
/// Carbon backend settings
pub carbon: Carbon,
/// Number of networking threads, use 0 for number of CPUs
pub n_threads: usize,
/// Number of aggregating(worker) threads, set to 0 to use all CPU cores
pub w_threads: usize,
/// queue size for single counting thread before packet is dropped
pub task_queue_size: usize,
/// Should we start as leader state enabled or not
pub start_as_leader: bool,
/// How often to gather own stats, in ms. Use 0 to disable (stats are still gathered, but not included in
/// metric dump)
pub stats_interval: u64,
/// Prefix to send own metrics with
pub stats_prefix: String,
/// Consensus kind to use
pub consensus: ConsensusKind,
}
impl Default for System {
fn default() -> Self {
Self {
verbosity: "warn".to_string(),
network: Network::default(),
raft: Raft::default(),
consul: Consul::default(),
metrics: Metrics::default(),
carbon: Carbon::default(),
n_threads: 4,
w_threads: 4,
stats_interval: 10000,
task_queue_size: 2048,
start_as_leader: false,
stats_prefix: "resources.monitoring.bioyino".to_string(),
consensus: ConsensusKind::None,
}
}
}
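// Minimal parsing sketch (hypothetical snippet, not part of the original
// source): keys are kebab-case per the serde attribute above, and any field
// left out falls back to these defaults.
#[allow(dead_code)]
fn parse_config_sketch() {
    let cfg = "verbosity = 'info'\nn-threads = 8";
    let system: System = toml::de::from_str(cfg).expect("parsing config");
    assert_eq!(system.verbosity, "info");
    assert_eq!(system.n_threads, 8);
}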
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Metrics {
// TODO: Maximum metric array size, 0 for unlimited
// max_metrics: usize,
/// Should we provide metrics with top update numbers
pub count_updates: bool,
/// Prefix for metric update statistics
pub update_counter_prefix: String,
/// Suffix for metric update statistics
pub update_counter_suffix: String,
/// Minimal update count to be reported
pub update_counter_threshold: u32,
/// Consistent parsing
pub consistent_parsing: bool,
/// Whether we should spam parsing errors in logs
pub log_parse_errors: bool,
/// Maximum length of data the parser can keep in its buffer before considering it trash and throwing
/// away
pub max_unparsed_buffer: usize,
/// Choose the way of aggregation
pub aggregation_mode: AggregationMode,
/// Number of threads when aggregating in "multi" mode
pub aggregation_threads: Option<usize>,
}
impl Default for Metrics {
fn default() -> Self {
Self {
// max_metrics: 0,
count_updates: true,
update_counter_prefix: "resources.monitoring.bioyino.updates".to_string(),
update_counter_suffix: String::new(),
update_counter_threshold: 200,
consistent_parsing: true,
log_parse_errors: false,
max_unparsed_buffer: 10000,
aggregation_mode: AggregationMode::Single,
aggregation_threads: None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Carbon {
// TODO: will be used when multiple backends support is implemented
///// Enable sending to carbon protocol backend
//pub enabled: bool,
/// IP and port of the carbon-protocol backend to send aggregated data to
pub address: String,
/// client bind address
pub bind_address: Option<SocketAddr>,
/// How often to send metrics to this backend, ms
pub interval: u64,
/// How long to sleep when connection to the backend fails, ms
pub connect_delay: u64,
/// Multiply delay to this value for each consequent connection failure
pub connect_delay_multiplier: f32,
/// Maximum retry delay, ms
pub connect_delay_max: u64,
/// How many times to retry when sending data to the backend before giving up and dropping all metrics
/// note, that 0 means 1 try
pub send_retries: usize,
/// The whole metric array can be split into smaller chunks for each chunk to be sent
/// in a separate connection. This is a workaround for go-carbon and carbon-c-relay doing
/// per-connection processing and working ineffectively when lots of metrics is sent in one
/// connection
pub chunks: usize,
}
impl Default for Carbon {
fn default() -> Self {
Self {
// enabled: true,
address: "127.0.0.1:2003".to_string(),
bind_address: None,
interval: 30000,
connect_delay: 250,
connect_delay_multiplier: 2f32,
connect_delay_max: 10000,
send_retries: 30,
chunks: 1,
}
}
}
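// Hypothetical sketch (not part of the original source) of the reconnect
// backoff these fields describe: the delay starts at `connect_delay`, is
// multiplied by `connect_delay_multiplier` after each failure, and is capped
// at `connect_delay_max`. With the defaults: 250 -> 500 -> 1000 -> ... -> 10000 ms.
#[allow(dead_code)]
fn next_connect_delay(current_ms: u64, multiplier: f32, max_ms: u64) -> u64 {
    ((current_ms as f32 * multiplier) as u64).min(max_ms)
}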
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Network {
/// Address and UDP port to listen for statsd metrics on
pub listen: SocketAddr,
/// Address and port for replication server to listen on
pub peer_listen: SocketAddr,
/// Snapshot client bind address
pub peer_client_bind: Option<SocketAddr>,
/// Address and port for management server to listen on
pub mgmt_listen: SocketAddr,
/// UDP buffer size for a single packet. Needs to be around the MTU; a packet's bytes beyond that value
/// may be lost
pub bufsize: usize,
/// Enable multimessage(recvmmsg) mode
pub multimessage: bool,
/// Number of multimessage packets to receive at once if in multimessage mode
pub mm_packets: usize,
/// Whether to receive multimessage packets asynchronously
pub mm_async: bool,
/// A timeout to return from multimessage mode syscall
pub mm_timeout: u64,
/// A timer to flush incoming buffer making sure metrics are not stuck there
pub buffer_flush_time: u64,
/// A length of incoming buffer to flush it making sure metrics are not stuck there
pub buffer_flush_length: usize,
/// Number of green threads for single-message mode
pub greens: usize,
/// Socket pool size for single-message mode
pub async_sockets: usize,
/// List of nodes to replicate metrics to
pub nodes: Vec<String>,
/// Interval to send snapshots to nodes, ms
pub snapshot_interval: usize,
}
impl Default for Network {
fn default() -> Self {
Self {
listen: "127.0.0.1:8125".parse().unwrap(),
peer_listen: "127.0.0.1:8136".parse().unwrap(),
peer_client_bind: None,
mgmt_listen: "127.0.0.1:8137".parse().unwrap(),
bufsize: 1500,
multimessage: false,
mm_packets: 100,
mm_async: false,
mm_timeout: 0,
buffer_flush_length: 0,
buffer_flush_time: 0,
greens: 4,
async_sockets: 4,
nodes: Vec::new(),
snapshot_interval: 1000,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Consul {
/// Start in disabled leader finding mode
pub start_as: ConsensusState,
/// Consul agent address
pub agent: SocketAddr,
/// TTL of consul session, ms (consul cannot set it to less than 10s)
pub session_ttl: usize,
/// How often to renew consul session, ms
pub renew_time: usize,
/// Name of the key to be locked in Consul
pub key_name: String,
}
impl Default for Consul {
fn default() -> Self {
Self { start_as: ConsensusState::Disabled, agent: "127.0.0.1:8500".parse().unwrap(), session_ttl: 11000, renew_time: 1000, key_name: "service/bioyino/lock".to_string() }
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Raft {
/// Delay raft after start (ms)
pub start_delay: u64,
/// Raft heartbeat timeout (ms)
pub heartbeat_timeout: u64,
/// Raft election timeout minimum (ms)
pub election_timeout_min: u64,
/// Raft election timeout maximum (ms)
pub election_timeout_max: u64,
/// Name of this node. By default is taken by resolving hostname in DNS.
pub this_node: Option<String>,
/// List of Raft nodes, may include this_node
pub nodes: HashMap<String, u64>,
/// Bind raft client to specific IP when connecting nodes
pub client_bind: Option<SocketAddr>,
}
impl Default for Raft {
fn default() -> Self {
Self { start_delay: 0, heartbeat_timeout: 250, election_timeout_min: 500, election_timeout_max: 750, this_node: None, nodes: HashMap::new(), client_bind: None }
}
}
impl Raft {
pub fn get_raft_options(&self) -> RaftOptions {
RaftOptions {
heartbeat_timeout: Duration::from_millis(self.heartbeat_timeout),
election_timeout: Range { start: Duration::from_millis(self.election_timeout_min), end: Duration::from_millis(self.election_timeout_max) },
}
}
}
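// Illustrative sketch (assumes `RaftOptions` exposes these fields, as the
// struct literal above suggests): the defaults yield a 250 ms heartbeat and
// an election timeout drawn from the 500..750 ms range.
#[allow(dead_code)]
fn raft_options_sketch() {
    let opts = Raft::default().get_raft_options();
    assert_eq!(opts.heartbeat_timeout, Duration::from_millis(250));
    assert_eq!(opts.election_timeout.start, Duration::from_millis(500));
}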
#[derive(Debug)]
pub enum Command {
Daemon,
Query(MgmtCommand, String),
}
impl System {
pub fn load() -> (Self, Command) {
// This is a first copy of args - with the "config" option
let app = app_from_crate!()
.long_version(concat!(crate_version!(), " ", env!("VERGEN_COMMIT_DATE"), " ", env!("VERGEN_SHA_SHORT")))
.arg(Arg::with_name("config").help("configuration file path").long("config").short("c").required(true).takes_value(true).default_value("/etc/bioyino/bioyino.toml"))
.arg(Arg::with_name("verbosity").short("v").help("logging level").takes_value(true))
.subcommand(SubCommand::with_name("query").about("send a management command to running bioyino server").arg(Arg::with_name("host").short("h").default_value("127.0.0.1:8137")).subcommand(SubCommand::with_name("status").about("get server state")).subcommand(SubCommand::with_name("consensus").arg(Arg::with_name("action").index(1)).arg(Arg::with_name("leader_action").index(2).default_value("unchanged"))))
.get_matches();
let config = value_t!(app.value_of("config"), String).expect("config file must be string");
let mut file = File::open(&config).expect(&format!("opening config file at {}", &config));
let mut config_str = String::new();
file.read_to_string(&mut config_str).expect("reading config file");
let mut system: System = toml::de::from_str(&config_str).expect("parsing config");
if let Some(v) = app.value_of("verbosity") {
system.verbosity = v.into()
}
if let Some(query) = app.subcommand_matches("query") {
let server = value_t!(query.value_of("host"), String).expect("bad server");
if let Some(_) = query.subcommand_matches("status") {
(system, Command::Query(MgmtCommand::Status, server))
} else if let Some(args) = query.subcommand_matches("consensus") | else {
// should be unreachable
unreachable!("clap bug?")
}
} else {
(system, Command::Daemon)
}
}
}
| {
let c_action = value_t!(args.value_of("action"), ConsensusAction).expect("bad consensus action");
let l_action = value_t!(args.value_of("leader_action"), LeaderAction).expect("bad leader action");
(system, Command::Query(MgmtCommand::ConsensusCommand(c_action, l_action), server))
} | conditional_block |
config.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::net::SocketAddr;
use std::ops::Range;
use std::time::Duration;
use clap::{app_from_crate, crate_authors, crate_description, crate_name, crate_version, value_t, Arg, SubCommand};
use toml;
use serde_derive::{Deserialize, Serialize};
use raft_tokio::RaftOptions;
use crate::aggregate::AggregationMode;
use crate::management::{ConsensusAction, LeaderAction, MgmtCommand};
use crate::{ConsensusKind, ConsensusState};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct System {
/// Logging level
pub verbosity: String,
/// Network settings
pub network: Network,
/// Internal Raft settings
pub raft: Raft,
/// Consul settings
pub consul: Consul,
/// Metric settings
pub metrics: Metrics,
/// Carbon backend settings
pub carbon: Carbon,
/// Number of networking threads, use 0 for number of CPUs
pub n_threads: usize,
/// Number of aggregating(worker) threads, set to 0 to use all CPU cores
pub w_threads: usize,
/// queue size for single counting thread before packet is dropped
pub task_queue_size: usize,
/// Should we start as leader state enabled or not
pub start_as_leader: bool,
/// How often to gather own stats, in ms. Use 0 to disable (stats are still gathered, but not included in
/// metric dump)
pub stats_interval: u64,
/// Prefix to send own metrics with
pub stats_prefix: String,
/// Consensus kind to use
pub consensus: ConsensusKind,
}
impl Default for System {
fn default() -> Self {
Self {
verbosity: "warn".to_string(),
network: Network::default(),
raft: Raft::default(),
consul: Consul::default(),
metrics: Metrics::default(),
carbon: Carbon::default(),
n_threads: 4,
w_threads: 4,
stats_interval: 10000,
task_queue_size: 2048,
start_as_leader: false,
stats_prefix: "resources.monitoring.bioyino".to_string(),
consensus: ConsensusKind::None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Metrics {
// TODO: Maximum metric array size, 0 for unlimited
// max_metrics: usize,
/// Should we provide metrics with top update numbers
pub count_updates: bool,
/// Prefix for metric update statistics
pub update_counter_prefix: String,
/// Suffix for metric update statistics
pub update_counter_suffix: String,
/// Minimal update count to be reported
pub update_counter_threshold: u32,
/// Consistent parsing
pub consistent_parsing: bool,
/// Whether we should spam parsing errors in logs
pub log_parse_errors: bool,
/// Maximum length of data the parser can keep in its buffer before considering it trash and throwing
/// away
pub max_unparsed_buffer: usize,
/// Choose the way of aggregation
pub aggregation_mode: AggregationMode,
/// Number of threads when aggregating in "multi" mode
pub aggregation_threads: Option<usize>,
}
impl Default for Metrics {
fn default() -> Self {
Self {
// max_metrics: 0,
count_updates: true,
update_counter_prefix: "resources.monitoring.bioyino.updates".to_string(),
update_counter_suffix: String::new(),
update_counter_threshold: 200,
consistent_parsing: true,
log_parse_errors: false,
max_unparsed_buffer: 10000,
aggregation_mode: AggregationMode::Single,
aggregation_threads: None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Carbon {
// TODO: will be used when multiple backends support is implemented
///// Enable sending to carbon protocol backend
//pub enabled: bool,
/// IP and port of the carbon-protocol backend to send aggregated data to
pub address: String,
/// client bind address
pub bind_address: Option<SocketAddr>,
/// How often to send metrics to this backend, ms
pub interval: u64,
/// How much to sleep when connection to backend fails, ms
pub connect_delay: u64,
/// Multiply the delay by this value for each consecutive connection failure
pub connect_delay_multiplier: f32,
/// Maximum retry delay, ms
pub connect_delay_max: u64,
/// How many times to retry sending data to the backend before giving up and dropping all metrics.
/// Note that 0 means a single try
pub send_retries: usize,
/// The whole metric array can be split into smaller chunks for each chunk to be sent
/// in a separate connection. This is a workaround for go-carbon and carbon-c-relay doing
/// per-connection processing and working ineffectively when lots of metrics are sent in one
/// connection
pub chunks: usize,
}
impl Default for Carbon {
fn default() -> Self {
Self {
// enabled: true,
address: "127.0.0.1:2003".to_string(),
bind_address: None,
interval: 30000,
connect_delay: 250,
connect_delay_multiplier: 2f32,
connect_delay_max: 10000,
send_retries: 30,
chunks: 1,
}
}
}
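// Sketch of the retry/backoff behaviour implied by the fields above (an
// assumption drawn from the field names and defaults, not code from the
// source): the delay starts at `connect_delay`, is multiplied by
// `connect_delay_multiplier` after each consecutive failure, and is
// capped at `connect_delay_max`.
//
//     let mut delay = 250u64;                      // connect_delay
//     for _attempt in 0..30 {                      // send_retries
//         delay = ((delay as f32) * 2f32) as u64;  // connect_delay_multiplier
//         delay = delay.min(10_000);               // connect_delay_max
//     }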
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Network {
/// Address and UDP port to listen for statsd metrics on
pub listen: SocketAddr,
/// Address and port for replication server to listen on
pub peer_listen: SocketAddr,
/// Snapshot client bind address
pub peer_client_bind: Option<SocketAddr>,
/// Address and port for management server to listen on
pub mgmt_listen: SocketAddr,
/// UDP buffer size for a single packet. Needs to be around the MTU; bytes of a packet beyond
/// this value may be lost
pub bufsize: usize,
/// Enable multimessage(recvmmsg) mode
pub multimessage: bool,
/// Number of multimessage packets to receive at once if in multimessage mode
pub mm_packets: usize,
/// Use asynchronous mode for multimessage receiving
pub mm_async: bool,
/// A timeout to return from multimessage mode syscall
pub mm_timeout: u64,
/// A timer to flush incoming buffer making sure metrics are not stuck there
pub buffer_flush_time: u64,
/// A length of incoming buffer to flush it making sure metrics are not stuck there
pub buffer_flush_length: usize,
/// Number of green threads for single-message mode
pub greens: usize,
/// Socket pool size for single-message mode
pub async_sockets: usize,
/// List of nodes to replicate metrics to
pub nodes: Vec<String>,
/// Interval to send snapshots to nodes, ms
pub snapshot_interval: usize,
}
impl Default for Network {
fn default() -> Self {
Self {
listen: "127.0.0.1:8125".parse().unwrap(),
peer_listen: "127.0.0.1:8136".parse().unwrap(),
peer_client_bind: None,
mgmt_listen: "127.0.0.1:8137".parse().unwrap(),
bufsize: 1500,
multimessage: false,
mm_packets: 100,
mm_async: false,
mm_timeout: 0,
buffer_flush_length: 0, | async_sockets: 4,
nodes: Vec::new(),
snapshot_interval: 1000,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Consul {
/// Start in disabled leader finding mode
pub start_as: ConsensusState,
/// Consul agent address
pub agent: SocketAddr,
/// TTL of consul session, ms (consul cannot set it to less than 10s)
pub session_ttl: usize,
/// How often to renew consul session, ms
pub renew_time: usize,
/// Name of the key to be locked in consul
pub key_name: String,
}
impl Default for Consul {
fn default() -> Self {
Self { start_as: ConsensusState::Disabled, agent: "127.0.0.1:8500".parse().unwrap(), session_ttl: 11000, renew_time: 1000, key_name: "service/bioyino/lock".to_string() }
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Raft {
/// Delay raft after start (ms)
pub start_delay: u64,
/// Raft heartbeat timeout (ms)
pub heartbeat_timeout: u64,
/// Raft election timeout minimum (ms)
pub election_timeout_min: u64,
/// Raft election timeout maximum (ms)
pub election_timeout_max: u64,
/// Name of this node. By default is taken by resolving hostname in DNS.
pub this_node: Option<String>,
/// List of Raft nodes, may include this_node
pub nodes: HashMap<String, u64>,
/// Bind raft client to specific IP when connecting nodes
pub client_bind: Option<SocketAddr>,
}
impl Default for Raft {
fn default() -> Self {
Self { start_delay: 0, heartbeat_timeout: 250, election_timeout_min: 500, election_timeout_max: 750, this_node: None, nodes: HashMap::new(), client_bind: None }
}
}
impl Raft {
pub fn get_raft_options(&self) -> RaftOptions {
RaftOptions {
heartbeat_timeout: Duration::from_millis(self.heartbeat_timeout),
election_timeout: Range { start: Duration::from_millis(self.election_timeout_min), end: Duration::from_millis(self.election_timeout_max) },
}
}
}
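// Hypothetical usage sketch (not from the original file): the ms-based
// config values become the `Duration`-based options raft-tokio expects.
//
//     let opts = Raft::default().get_raft_options();
//     assert_eq!(opts.heartbeat_timeout, Duration::from_millis(250));
//     assert_eq!(opts.election_timeout.start, Duration::from_millis(500));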
#[derive(Debug)]
pub enum Command {
Daemon,
Query(MgmtCommand, String),
}
impl System {
pub fn load() -> (Self, Command) {
// This is a first copy of args - with the "config" option
let app = app_from_crate!()
.long_version(concat!(crate_version!(), " ", env!("VERGEN_COMMIT_DATE"), " ", env!("VERGEN_SHA_SHORT")))
.arg(Arg::with_name("config").help("configuration file path").long("config").short("c").required(true).takes_value(true).default_value("/etc/bioyino/bioyino.toml"))
.arg(Arg::with_name("verbosity").short("v").help("logging level").takes_value(true))
.subcommand(SubCommand::with_name("query").about("send a management command to running bioyino server").arg(Arg::with_name("host").short("h").default_value("127.0.0.1:8137")).subcommand(SubCommand::with_name("status").about("get server state")).subcommand(SubCommand::with_name("consensus").arg(Arg::with_name("action").index(1)).arg(Arg::with_name("leader_action").index(2).default_value("unchanged"))))
.get_matches();
let config = value_t!(app.value_of("config"), String).expect("config file must be string");
let mut file = File::open(&config).expect(&format!("opening config file at {}", &config));
let mut config_str = String::new();
file.read_to_string(&mut config_str).expect("reading config file");
let mut system: System = toml::de::from_str(&config_str).expect("parsing config");
if let Some(v) = app.value_of("verbosity") {
system.verbosity = v.into()
}
if let Some(query) = app.subcommand_matches("query") {
let server = value_t!(query.value_of("host"), String).expect("bad server");
if let Some(_) = query.subcommand_matches("status") {
(system, Command::Query(MgmtCommand::Status, server))
} else if let Some(args) = query.subcommand_matches("consensus") {
let c_action = value_t!(args.value_of("action"), ConsensusAction).expect("bad consensus action");
let l_action = value_t!(args.value_of("leader_action"), LeaderAction).expect("bad leader action");
(system, Command::Query(MgmtCommand::ConsensusCommand(c_action, l_action), server))
} else {
// should be unreachable
unreachable!("clap bug?")
}
} else {
(system, Command::Daemon)
}
}
} | buffer_flush_time: 0,
greens: 4, | random_line_split |
config.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::net::SocketAddr;
use std::ops::Range;
use std::time::Duration;
use clap::{app_from_crate, crate_authors, crate_description, crate_name, crate_version, value_t, Arg, SubCommand};
use toml;
use serde_derive::{Deserialize, Serialize};
use raft_tokio::RaftOptions;
use crate::aggregate::AggregationMode;
use crate::management::{ConsensusAction, LeaderAction, MgmtCommand};
use crate::{ConsensusKind, ConsensusState};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct System {
/// Logging level
pub verbosity: String,
/// Network settings
pub network: Network,
/// Internal Raft settings
pub raft: Raft,
/// Consul settings
pub consul: Consul,
/// Metric settings
pub metrics: Metrics,
/// Carbon backend settings
pub carbon: Carbon,
/// Number of networking threads, use 0 for number of CPUs
pub n_threads: usize,
/// Number of aggregating(worker) threads, set to 0 to use all CPU cores
pub w_threads: usize,
/// Queue size for a single counting thread before packets are dropped
pub task_queue_size: usize,
/// Should we start as leader state enabled or not
pub start_as_leader: bool,
/// How often to gather own stats, in ms. Use 0 to disable (stats are still gathered, but not included in
/// metric dump)
pub stats_interval: u64,
/// Prefix to send own metrics with
pub stats_prefix: String,
/// Consensus kind to use
pub consensus: ConsensusKind,
}
impl Default for System {
fn default() -> Self {
Self {
verbosity: "warn".to_string(),
network: Network::default(),
raft: Raft::default(),
consul: Consul::default(),
metrics: Metrics::default(),
carbon: Carbon::default(),
n_threads: 4,
w_threads: 4,
stats_interval: 10000,
task_queue_size: 2048,
start_as_leader: false,
stats_prefix: "resources.monitoring.bioyino".to_string(),
consensus: ConsensusKind::None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Metrics {
// TODO: Maximum metric array size, 0 for unlimited
// max_metrics: usize,
/// Should we provide metrics with top update numbers
pub count_updates: bool,
/// Prefix for metric update statistics
pub update_counter_prefix: String,
/// Suffix for metric update statistics
pub update_counter_suffix: String,
/// Minimal update count to be reported
pub update_counter_threshold: u32,
/// Consistent parsing
pub consistent_parsing: bool,
/// Whether we should spam parsing errors in logs
pub log_parse_errors: bool,
/// Maximum length of data the parser can keep in its buffer before considering it trash and
/// throwing it away
pub max_unparsed_buffer: usize,
/// Choose the way of aggregation
pub aggregation_mode: AggregationMode,
/// Number of threads when aggregating in "multi" mode
pub aggregation_threads: Option<usize>,
}
impl Default for Metrics {
fn default() -> Self {
Self {
// max_metrics: 0,
count_updates: true,
update_counter_prefix: "resources.monitoring.bioyino.updates".to_string(),
update_counter_suffix: String::new(),
update_counter_threshold: 200,
consistent_parsing: true,
log_parse_errors: false,
max_unparsed_buffer: 10000,
aggregation_mode: AggregationMode::Single,
aggregation_threads: None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Carbon {
// TODO: will be used when multiple backends support is implemented
///// Enable sending to carbon protocol backend
//pub enabled: bool,
/// IP and port of the carbon-protocol backend to send aggregated data to
pub address: String,
/// client bind address
pub bind_address: Option<SocketAddr>,
/// How often to send metrics to this backend, ms
pub interval: u64,
/// How much to sleep when connection to backend fails, ms
pub connect_delay: u64,
/// Multiply the delay by this value for each consecutive connection failure
pub connect_delay_multiplier: f32,
/// Maximum retry delay, ms
pub connect_delay_max: u64,
/// How many times to retry sending data to the backend before giving up and dropping all metrics.
/// Note that 0 means a single try
pub send_retries: usize,
/// The whole metric array can be split into smaller chunks for each chunk to be sent
/// in a separate connection. This is a workaround for go-carbon and carbon-c-relay doing
/// per-connection processing and working ineffectively when lots of metrics are sent in one
/// connection
pub chunks: usize,
}
impl Default for Carbon {
fn default() -> Self {
Self {
// enabled: true,
address: "127.0.0.1:2003".to_string(),
bind_address: None,
interval: 30000,
connect_delay: 250,
connect_delay_multiplier: 2f32,
connect_delay_max: 10000,
send_retries: 30,
chunks: 1,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Network {
/// Address and UDP port to listen for statsd metrics on
pub listen: SocketAddr,
/// Address and port for replication server to listen on
pub peer_listen: SocketAddr,
/// Snapshot client bind address
pub peer_client_bind: Option<SocketAddr>,
/// Address and port for management server to listen on
pub mgmt_listen: SocketAddr,
/// UDP buffer size for a single packet. Needs to be around the MTU; bytes of a packet beyond
/// this value may be lost
pub bufsize: usize,
/// Enable multimessage(recvmmsg) mode
pub multimessage: bool,
/// Number of multimessage packets to receive at once if in multimessage mode
pub mm_packets: usize,
/// Use asynchronous mode for multimessage receiving
pub mm_async: bool,
/// A timeout to return from multimessage mode syscall
pub mm_timeout: u64,
/// A timer to flush incoming buffer making sure metrics are not stuck there
pub buffer_flush_time: u64,
/// A length of incoming buffer to flush it making sure metrics are not stuck there
pub buffer_flush_length: usize,
/// Number of green threads for single-message mode
pub greens: usize,
/// Socket pool size for single-message mode
pub async_sockets: usize,
/// List of nodes to replicate metrics to
pub nodes: Vec<String>,
/// Interval to send snapshots to nodes, ms
pub snapshot_interval: usize,
}
impl Default for Network {
fn default() -> Self {
Self {
listen: "127.0.0.1:8125".parse().unwrap(),
peer_listen: "127.0.0.1:8136".parse().unwrap(),
peer_client_bind: None,
mgmt_listen: "127.0.0.1:8137".parse().unwrap(),
bufsize: 1500,
multimessage: false,
mm_packets: 100,
mm_async: false,
mm_timeout: 0,
buffer_flush_length: 0,
buffer_flush_time: 0,
greens: 4,
async_sockets: 4,
nodes: Vec::new(),
snapshot_interval: 1000,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Consul {
/// Start in disabled leader finding mode
pub start_as: ConsensusState,
/// Consul agent address
pub agent: SocketAddr,
/// TTL of consul session, ms (consul cannot set it to less than 10s)
pub session_ttl: usize,
/// How often to renew consul session, ms
pub renew_time: usize,
/// Name of the key to be locked in consul
pub key_name: String,
}
impl Default for Consul {
fn default() -> Self {
Self { start_as: ConsensusState::Disabled, agent: "127.0.0.1:8500".parse().unwrap(), session_ttl: 11000, renew_time: 1000, key_name: "service/bioyino/lock".to_string() }
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", default, deny_unknown_fields)]
pub struct Raft {
/// Delay raft after start (ms)
pub start_delay: u64,
/// Raft heartbeat timeout (ms)
pub heartbeat_timeout: u64,
/// Raft election timeout minimum (ms)
pub election_timeout_min: u64,
/// Raft election timeout maximum (ms)
pub election_timeout_max: u64,
/// Name of this node. By default is taken by resolving hostname in DNS.
pub this_node: Option<String>,
/// List of Raft nodes, may include this_node
pub nodes: HashMap<String, u64>,
/// Bind raft client to specific IP when connecting nodes
pub client_bind: Option<SocketAddr>,
}
impl Default for Raft {
fn default() -> Self {
Self { start_delay: 0, heartbeat_timeout: 250, election_timeout_min: 500, election_timeout_max: 750, this_node: None, nodes: HashMap::new(), client_bind: None }
}
}
impl Raft {
pub fn get_raft_options(&self) -> RaftOptions {
RaftOptions {
heartbeat_timeout: Duration::from_millis(self.heartbeat_timeout),
election_timeout: Range { start: Duration::from_millis(self.election_timeout_min), end: Duration::from_millis(self.election_timeout_max) },
}
}
}
#[derive(Debug)]
pub enum | {
Daemon,
Query(MgmtCommand, String),
}
impl System {
pub fn load() -> (Self, Command) {
// This is a first copy of args - with the "config" option
let app = app_from_crate!()
.long_version(concat!(crate_version!(), " ", env!("VERGEN_COMMIT_DATE"), " ", env!("VERGEN_SHA_SHORT")))
.arg(Arg::with_name("config").help("configuration file path").long("config").short("c").required(true).takes_value(true).default_value("/etc/bioyino/bioyino.toml"))
.arg(Arg::with_name("verbosity").short("v").help("logging level").takes_value(true))
.subcommand(SubCommand::with_name("query").about("send a management command to running bioyino server").arg(Arg::with_name("host").short("h").default_value("127.0.0.1:8137")).subcommand(SubCommand::with_name("status").about("get server state")).subcommand(SubCommand::with_name("consensus").arg(Arg::with_name("action").index(1)).arg(Arg::with_name("leader_action").index(2).default_value("unchanged"))))
.get_matches();
let config = value_t!(app.value_of("config"), String).expect("config file must be string");
let mut file = File::open(&config).expect(&format!("opening config file at {}", &config));
let mut config_str = String::new();
file.read_to_string(&mut config_str).expect("reading config file");
let mut system: System = toml::de::from_str(&config_str).expect("parsing config");
if let Some(v) = app.value_of("verbosity") {
system.verbosity = v.into()
}
if let Some(query) = app.subcommand_matches("query") {
let server = value_t!(query.value_of("host"), String).expect("bad server");
if let Some(_) = query.subcommand_matches("status") {
(system, Command::Query(MgmtCommand::Status, server))
} else if let Some(args) = query.subcommand_matches("consensus") {
let c_action = value_t!(args.value_of("action"), ConsensusAction).expect("bad consensus action");
let l_action = value_t!(args.value_of("leader_action"), LeaderAction).expect("bad leader action");
(system, Command::Query(MgmtCommand::ConsensusCommand(c_action, l_action), server))
} else {
// should be unreachable
unreachable!("clap bug?")
}
} else {
(system, Command::Daemon)
}
}
}
| Command | identifier_name |
building.rs | use std::collections::{BTreeMap, HashSet, VecDeque};
use std::fmt;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::{Display, EnumIter, EnumString};
use abstutil::{
deserialize_btreemap, deserialize_usize, serialize_btreemap, serialize_usize, Tags,
};
use geom::{Distance, PolyLine, Polygon, Pt2D};
use crate::{osm, LaneID, Map, PathConstraints, Position};
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct BuildingID(
#[serde(
serialize_with = "serialize_usize",
deserialize_with = "deserialize_usize"
)]
pub usize,
);
impl fmt::Display for BuildingID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Building #{}", self.0)
}
}
/// A building has connections to the road and sidewalk, may contain commercial amenities, and
/// may have off-street parking.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Building {
pub id: BuildingID,
pub polygon: Polygon,
pub levels: f64,
pub address: String,
pub name: Option<NamePerLanguage>,
pub orig_id: osm::OsmID,
/// Where a text label should be centered to have the best chances of being contained within
/// the polygon.
pub label_center: Pt2D,
pub amenities: Vec<Amenity>,
pub bldg_type: BuildingType,
pub parking: OffstreetParking,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
/// The building's connection for any agent can change based on map edits. Just store the one
/// for pedestrians and lazily calculate the others.
pub sidewalk_pos: Position,
/// Goes from building to sidewalk
pub driveway_geom: PolyLine,
}
/// A business located inside a building.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Amenity {
pub names: NamePerLanguage,
/// This is the specific amenity listed in OSM, not the more general `AmenityType` category.
pub amenity_type: String,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
}
/// Represent no parking as Private(0, false).
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum OffstreetParking {
/// (Name, spots)
PublicGarage(String, usize),
/// (Spots, explicitly tagged as a garage)
Private(usize, bool),
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum BuildingType {
Residential {
num_residents: usize,
num_housing_units: usize,
},
/// An estimated number of residents, workers
ResidentialCommercial(usize, usize),
/// An estimated number of workers
Commercial(usize),
Empty,
}
impl BuildingType {
pub fn has_residents(&self) -> bool {
match self {
BuildingType::Residential { .. } | BuildingType::ResidentialCommercial(_, _) => true,
BuildingType::Commercial(_) | BuildingType::Empty => false,
}
}
}
/// None corresponds to the native name
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct NamePerLanguage(
#[serde(
serialize_with = "serialize_btreemap",
deserialize_with = "deserialize_btreemap"
)]
pub(crate) BTreeMap<Option<String>, String>,
);
impl NamePerLanguage {
pub fn get(&self, lang: Option<&String>) -> &String {
// TODO Can we avoid this clone?
let lang = lang.cloned();
if let Some(name) = self.0.get(&lang) {
return name;
}
&self.0[&None]
}
pub fn new(tags: &Tags) -> Option<NamePerLanguage> {
let native_name = tags.get(osm::NAME)?;
let mut map = BTreeMap::new();
map.insert(None, native_name.to_string());
for (k, v) in tags.inner() {
if let Some(lang) = k.strip_prefix("name:") {
map.insert(Some(lang.to_string()), v.to_string());
}
}
Some(NamePerLanguage(map))
}
pub fn unnamed() -> NamePerLanguage {
let mut map = BTreeMap::new();
map.insert(None, "unnamed".to_string());
NamePerLanguage(map)
}
}
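// Hypothetical usage sketch (`Tags::empty` and `insert` are assumed to
// exist in abstutil; the values are invented): OSM's `name` tag becomes
// the `None` key and `name:<lang>` tags become `Some(lang)` keys.
//
//     let mut tags = Tags::empty();
//     tags.insert("name", "Marktplatz");
//     tags.insert("name:en", "Market Square");
//     let names = NamePerLanguage::new(&tags).unwrap();
//     assert_eq!(names.get(Some(&"en".to_string())), "Market Square");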
impl Building {
pub fn sidewalk(&self) -> LaneID {
self.sidewalk_pos.lane()
}
pub fn house_number(&self) -> Option<String> {
let num = self.address.split(' ').next().unwrap();
if num != "???" {
Some(num.to_string())
} else {
None
}
}
/// The polyline goes from the building to the driving position
// TODO Make this handle parking_blackhole
pub fn driving_connection(&self, map: &Map) -> Option<(Position, PolyLine)> |
/// Returns (biking position, sidewalk position). Could fail if the biking graph is
/// disconnected.
pub fn biking_connection(&self, map: &Map) -> Option<(Position, Position)> {
// Easy case: the building is directly next to a usable lane
if let Some(pair) = sidewalk_to_bike(self.sidewalk_pos, map) {
return Some(pair);
}
// Floodfill the sidewalk graph until we find a sidewalk<->bike connection.
let mut queue: VecDeque<LaneID> = VecDeque::new();
let mut visited: HashSet<LaneID> = HashSet::new();
queue.push_back(self.sidewalk());
loop {
if queue.is_empty() {
return None;
}
let l = queue.pop_front().unwrap();
if visited.contains(&l) {
continue;
}
visited.insert(l);
// TODO Could search by sidewalk endpoint
if let Some(pair) = sidewalk_to_bike(Position::new(l, map.get_l(l).length() / 2.0), map)
{
return Some(pair);
}
for t in map.get_turns_from_lane(l) {
if !visited.contains(&t.id.dst) {
queue.push_back(t.id.dst);
}
}
}
}
pub fn num_parking_spots(&self) -> usize {
match self.parking {
OffstreetParking::PublicGarage(_, n) => n,
OffstreetParking::Private(n, _) => n,
}
}
/// Does this building contain any amenity matching the category?
pub fn has_amenity(&self, category: AmenityType) -> bool {
for amenity in &self.amenities {
if AmenityType::categorize(&amenity.amenity_type) == Some(category) {
return true;
}
}
false
}
}
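// Hypothetical usage sketch (assumes a loaded `map` and a building
// `bldg`; neither comes from this file): `biking_connection` tries the
// lane right next to the building first, then breadth-first searches
// over sidewalk turns until it reaches a sidewalk with a usable bike
// lane beside it.
//
//     if let Some((bike_pos, walk_pos)) = bldg.biking_connection(&map) {
//         let lane = bike_pos.lane(); // where a cyclist would start
//     }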
fn sidewalk_to_bike(sidewalk_pos: Position, map: &Map) -> Option<(Position, Position)> {
let lane = map
.get_parent(sidewalk_pos.lane())
.find_closest_lane(sidewalk_pos.lane(), |l| {
!l.biking_blackhole && PathConstraints::Bike.can_use(l, map)
})?;
// No buffer needed
Some((sidewalk_pos.equiv_pos(lane, map), sidewalk_pos))
}
/// Businesses are categorized into one of these types.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, EnumString, Display, EnumIter)]
pub enum AmenityType {
Bank,
Bar,
Beauty,
Bike,
Cafe,
CarRepair,
CarShare,
Childcare,
ConvenienceStore,
Culture,
Exercise,
FastFood,
Food,
GreenSpace,
Hotel,
Laundry,
Library,
Medical,
Pet,
Playground,
Pool,
PostOffice,
Religious,
School,
Shopping,
Supermarket,
Tourism,
University,
}
impl AmenityType {
fn types(self) -> Vec<&'static str> {
match self {
AmenityType::Bank => vec!["bank"],
AmenityType::Bar => vec!["bar", "pub", "nightclub", "biergarten"],
AmenityType::Beauty => vec!["hairdresser", "beauty", "chemist", "cosmetics"],
AmenityType::Bike => vec!["bicycle"],
AmenityType::Cafe => vec!["cafe", "pastry", "coffee", "tea", "bakery"],
AmenityType::CarRepair => vec!["car_repair"],
AmenityType::CarShare => vec!["car_sharing"],
AmenityType::Childcare => vec!["childcare", "kindergarten"],
AmenityType::ConvenienceStore => vec!["convenience"],
AmenityType::Culture => vec!["arts_centre", "art", "cinema", "theatre"],
AmenityType::Exercise => vec!["fitness_centre", "sports_centre", "track", "pitch"],
AmenityType::FastFood => vec!["fast_food", "food_court"],
AmenityType::Food => vec![
"restaurant",
"farm",
"ice_cream",
"seafood",
"cheese",
"chocolate",
"deli",
"butcher",
"confectionery",
"beverages",
"alcohol",
],
AmenityType::GreenSpace => vec!["park", "garden", "nature_reserve"],
AmenityType::Hotel => vec!["hotel", "hostel", "guest_house", "motel"],
AmenityType::Laundry => vec!["dry_cleaning", "laundry", "tailor"],
AmenityType::Library => vec!["library"],
AmenityType::Medical => vec![
"clinic", "dentist", "hospital", "pharmacy", "doctors", "optician",
],
AmenityType::Pet => vec!["veterinary", "pet", "animal_boarding", "pet_grooming"],
AmenityType::Playground => vec!["playground"],
AmenityType::Pool => vec!["swimming_pool"],
AmenityType::PostOffice => vec!["post_office"],
AmenityType::Religious => vec!["place_of_worship", "religion"],
AmenityType::School => vec!["school"],
AmenityType::Shopping => vec![
"wholesale",
"bag",
"marketplace",
"second_hand",
"charity",
"clothes",
"lottery",
"shoes",
"mall",
"department_store",
"car",
"tailor",
"nutrition_supplements",
"watches",
"craft",
"fabric",
"kiosk",
"antiques",
"shoemaker",
"hardware",
"houseware",
"mobile_phone",
"photo",
"toys",
"bed",
"florist",
"electronics",
"fishing",
"garden_centre",
"frame",
"watchmaker",
"boutique",
"mobile_phone",
"party",
"car_parts",
"video",
"video_games",
"musical_instrument",
"music",
"baby_goods",
"doityourself",
"jewelry",
"variety_store",
"gift",
"carpet",
"perfumery",
"curtain",
"appliance",
"furniture",
"lighting",
"sewing",
"books",
"sports",
"travel_agency",
"interior_decoration",
"stationery",
"computer",
"tyres",
"newsagent",
"general",
],
AmenityType::Supermarket => vec!["supermarket", "greengrocer"],
AmenityType::Tourism => vec![
"gallery",
"museum",
"zoo",
"attraction",
"theme_park",
"aquarium",
],
AmenityType::University => vec!["college", "university"],
}
}
/// All types of amenities, in alphabetical order.
pub fn all() -> Vec<AmenityType> {
AmenityType::iter().collect()
}
/// Categorize an OSM amenity tag.
pub fn categorize(a: &str) -> Option<AmenityType> {
for at in AmenityType::all() {
if at.types().contains(&a) {
return Some(at);
}
}
None
}
}
| {
let lane = map
.get_parent(self.sidewalk())
.find_closest_lane(self.sidewalk(), |l| PathConstraints::Car.can_use(l, map))?;
// TODO Do we need to insist on this buffer, now that we can make cars gradually appear?
let pos = self
.sidewalk_pos
.equiv_pos(lane, map)
.buffer_dist(Distance::meters(7.0), map)?;
Some((pos, self.driveway_geom.clone().optionally_push(pos.pt(map))))
} | identifier_body |
building.rs | use std::collections::{BTreeMap, HashSet, VecDeque};
use std::fmt;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::{Display, EnumIter, EnumString};
use abstutil::{
deserialize_btreemap, deserialize_usize, serialize_btreemap, serialize_usize, Tags,
};
use geom::{Distance, PolyLine, Polygon, Pt2D};
use crate::{osm, LaneID, Map, PathConstraints, Position};
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct BuildingID(
#[serde(
serialize_with = "serialize_usize",
deserialize_with = "deserialize_usize"
)]
pub usize,
);
impl fmt::Display for BuildingID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Building #{}", self.0)
}
}
/// A building has connections to the road and sidewalk, may contain commercial amenities, and
/// may have off-street parking.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Building {
pub id: BuildingID,
pub polygon: Polygon,
pub levels: f64,
pub address: String,
pub name: Option<NamePerLanguage>,
pub orig_id: osm::OsmID,
/// Where a text label should be centered to have the best chances of being contained within
/// the polygon.
pub label_center: Pt2D,
pub amenities: Vec<Amenity>,
pub bldg_type: BuildingType,
pub parking: OffstreetParking,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
/// The building's connection for any agent can change based on map edits. Just store the one
/// for pedestrians and lazily calculate the others.
pub sidewalk_pos: Position,
/// Goes from building to sidewalk
pub driveway_geom: PolyLine,
}
/// A business located inside a building.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Amenity {
pub names: NamePerLanguage,
/// This is the specific amenity listed in OSM, not the more general `AmenityType` category.
pub amenity_type: String,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
}
/// Represent no parking as Private(0, false).
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum OffstreetParking {
/// (Name, spots)
PublicGarage(String, usize),
/// (Spots, explicitly tagged as a garage)
Private(usize, bool),
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum BuildingType {
Residential {
num_residents: usize,
num_housing_units: usize,
},
/// An estimated number of residents, workers
ResidentialCommercial(usize, usize),
/// An estimated number of workers
Commercial(usize),
Empty,
}
impl BuildingType {
pub fn has_residents(&self) -> bool {
match self {
BuildingType::Residential { .. } | BuildingType::ResidentialCommercial(_, _) => true,
BuildingType::Commercial(_) | BuildingType::Empty => false,
}
}
}
/// None corresponds to the native name
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct NamePerLanguage(
#[serde(
serialize_with = "serialize_btreemap",
deserialize_with = "deserialize_btreemap"
)]
pub(crate) BTreeMap<Option<String>, String>,
);
impl NamePerLanguage {
pub fn get(&self, lang: Option<&String>) -> &String {
// TODO Can we avoid this clone?
let lang = lang.cloned();
if let Some(name) = self.0.get(&lang) {
return name;
}
&self.0[&None]
}
pub fn new(tags: &Tags) -> Option<NamePerLanguage> {
let native_name = tags.get(osm::NAME)?;
let mut map = BTreeMap::new();
map.insert(None, native_name.to_string());
for (k, v) in tags.inner() {
if let Some(lang) = k.strip_prefix("name:") {
map.insert(Some(lang.to_string()), v.to_string());
}
}
Some(NamePerLanguage(map))
}
pub fn unnamed() -> NamePerLanguage {
let mut map = BTreeMap::new();
map.insert(None, "unnamed".to_string());
NamePerLanguage(map)
}
}
impl Building {
pub fn sidewalk(&self) -> LaneID {
self.sidewalk_pos.lane()
}
pub fn house_number(&self) -> Option<String> {
let num = self.address.split(' ').next().unwrap();
if num != "???" {
Some(num.to_string())
} else {
None
}
}
/// The polyline goes from the building to the driving position
// TODO Make this handle parking_blackhole
pub fn driving_connection(&self, map: &Map) -> Option<(Position, PolyLine)> {
let lane = map
.get_parent(self.sidewalk())
.find_closest_lane(self.sidewalk(), |l| PathConstraints::Car.can_use(l, map))?;
// TODO Do we need to insist on this buffer, now that we can make cars gradually appear?
let pos = self
.sidewalk_pos
.equiv_pos(lane, map)
.buffer_dist(Distance::meters(7.0), map)?;
Some((pos, self.driveway_geom.clone().optionally_push(pos.pt(map))))
}
/// Returns (biking position, sidewalk position). Could fail if the biking graph is
/// disconnected.
pub fn biking_connection(&self, map: &Map) -> Option<(Position, Position)> {
// Easy case: the building is directly next to a usable lane
if let Some(pair) = sidewalk_to_bike(self.sidewalk_pos, map) {
return Some(pair);
}
// Floodfill the sidewalk graph until we find a sidewalk<->bike connection.
let mut queue: VecDeque<LaneID> = VecDeque::new();
let mut visited: HashSet<LaneID> = HashSet::new();
queue.push_back(self.sidewalk());
loop {
if queue.is_empty() {
return None;
}
let l = queue.pop_front().unwrap();
if visited.contains(&l) {
continue;
}
visited.insert(l);
// TODO Could search by sidewalk endpoint
if let Some(pair) = sidewalk_to_bike(Position::new(l, map.get_l(l).length() / 2.0), map)
{
return Some(pair);
}
for t in map.get_turns_from_lane(l) {
if !visited.contains(&t.id.dst) {
queue.push_back(t.id.dst);
}
}
}
}
pub fn num_parking_spots(&self) -> usize {
match self.parking {
OffstreetParking::PublicGarage(_, n) => n,
OffstreetParking::Private(n, _) => n,
}
}
/// Does this building contain any amenity matching the category?
pub fn has_amenity(&self, category: AmenityType) -> bool {
for amenity in &self.amenities {
if AmenityType::categorize(&amenity.amenity_type) == Some(category) {
return true;
}
}
false
}
}
fn sidewalk_to_bike(sidewalk_pos: Position, map: &Map) -> Option<(Position, Position)> {
let lane = map
.get_parent(sidewalk_pos.lane())
.find_closest_lane(sidewalk_pos.lane(), |l| {
!l.biking_blackhole && PathConstraints::Bike.can_use(l, map)
})?;
// No buffer needed
Some((sidewalk_pos.equiv_pos(lane, map), sidewalk_pos))
}
/// Businesses are categorized into one of these types.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, EnumString, Display, EnumIter)]
pub enum | {
Bank,
Bar,
Beauty,
Bike,
Cafe,
CarRepair,
CarShare,
Childcare,
ConvenienceStore,
Culture,
Exercise,
FastFood,
Food,
GreenSpace,
Hotel,
Laundry,
Library,
Medical,
Pet,
Playground,
Pool,
PostOffice,
Religious,
School,
Shopping,
Supermarket,
Tourism,
University,
}
impl AmenityType {
fn types(self) -> Vec<&'static str> {
match self {
AmenityType::Bank => vec!["bank"],
AmenityType::Bar => vec!["bar", "pub", "nightclub", "biergarten"],
AmenityType::Beauty => vec!["hairdresser", "beauty", "chemist", "cosmetics"],
AmenityType::Bike => vec!["bicycle"],
AmenityType::Cafe => vec!["cafe", "pastry", "coffee", "tea", "bakery"],
AmenityType::CarRepair => vec!["car_repair"],
AmenityType::CarShare => vec!["car_sharing"],
AmenityType::Childcare => vec!["childcare", "kindergarten"],
AmenityType::ConvenienceStore => vec!["convenience"],
AmenityType::Culture => vec!["arts_centre", "art", "cinema", "theatre"],
AmenityType::Exercise => vec!["fitness_centre", "sports_centre", "track", "pitch"],
AmenityType::FastFood => vec!["fast_food", "food_court"],
AmenityType::Food => vec![
"restaurant",
"farm",
"ice_cream",
"seafood",
"cheese",
"chocolate",
"deli",
"butcher",
"confectionery",
"beverages",
"alcohol",
],
AmenityType::GreenSpace => vec!["park", "garden", "nature_reserve"],
AmenityType::Hotel => vec!["hotel", "hostel", "guest_house", "motel"],
AmenityType::Laundry => vec!["dry_cleaning", "laundry", "tailor"],
AmenityType::Library => vec!["library"],
AmenityType::Medical => vec![
"clinic", "dentist", "hospital", "pharmacy", "doctors", "optician",
],
AmenityType::Pet => vec!["veterinary", "pet", "animal_boarding", "pet_grooming"],
AmenityType::Playground => vec!["playground"],
AmenityType::Pool => vec!["swimming_pool"],
AmenityType::PostOffice => vec!["post_office"],
AmenityType::Religious => vec!["place_of_worship", "religion"],
AmenityType::School => vec!["school"],
AmenityType::Shopping => vec![
"wholesale",
"bag",
"marketplace",
"second_hand",
"charity",
"clothes",
"lottery",
"shoes",
"mall",
"department_store",
"car",
"tailor",
"nutrition_supplements",
"watches",
"craft",
"fabric",
"kiosk",
"antiques",
"shoemaker",
"hardware",
"houseware",
"mobile_phone",
"photo",
"toys",
"bed",
"florist",
"electronics",
"fishing",
"garden_centre",
"frame",
"watchmaker",
"boutique",
"mobile_phone",
"party",
"car_parts",
"video",
"video_games",
"musical_instrument",
"music",
"baby_goods",
"doityourself",
"jewelry",
"variety_store",
"gift",
"carpet",
"perfumery",
"curtain",
"appliance",
"furniture",
"lighting",
"sewing",
"books",
"sports",
"travel_agency",
"interior_decoration",
"stationery",
"computer",
"tyres",
"newsagent",
"general",
],
AmenityType::Supermarket => vec!["supermarket", "greengrocer"],
AmenityType::Tourism => vec![
"gallery",
"museum",
"zoo",
"attraction",
"theme_park",
"aquarium",
],
AmenityType::University => vec!["college", "university"],
}
}
/// All types of amenities, in alphabetical order.
pub fn all() -> Vec<AmenityType> {
AmenityType::iter().collect()
}
/// Categorize an OSM amenity tag.
pub fn categorize(a: &str) -> Option<AmenityType> {
for at in AmenityType::all() {
if at.types().contains(&a) {
return Some(at);
}
}
None
}
}
| AmenityType | identifier_name |
building.rs | use std::collections::{BTreeMap, HashSet, VecDeque};
use std::fmt;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::{Display, EnumIter, EnumString};
use abstutil::{
deserialize_btreemap, deserialize_usize, serialize_btreemap, serialize_usize, Tags,
};
use geom::{Distance, PolyLine, Polygon, Pt2D};
use crate::{osm, LaneID, Map, PathConstraints, Position};
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct BuildingID(
#[serde(
serialize_with = "serialize_usize",
deserialize_with = "deserialize_usize"
)]
pub usize,
);
impl fmt::Display for BuildingID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Building #{}", self.0)
}
}
/// A building has connections to the road and sidewalk, may contain commercial amenities, and
/// may have off-street parking.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Building {
pub id: BuildingID,
pub polygon: Polygon,
pub levels: f64,
pub address: String,
pub name: Option<NamePerLanguage>,
pub orig_id: osm::OsmID,
/// Where a text label should be centered to have the best chances of being contained within
/// the polygon.
pub label_center: Pt2D,
pub amenities: Vec<Amenity>,
pub bldg_type: BuildingType,
pub parking: OffstreetParking,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
/// The building's connection for any agent can change based on map edits. Just store the one
/// for pedestrians and lazily calculate the others.
pub sidewalk_pos: Position,
/// Goes from building to sidewalk
pub driveway_geom: PolyLine,
}
/// A business located inside a building.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Amenity {
pub names: NamePerLanguage,
/// This is the specific amenity listed in OSM, not the more general `AmenityType` category.
pub amenity_type: String,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
}
/// Represent no parking as Private(0, false).
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum OffstreetParking {
/// (Name, spots)
PublicGarage(String, usize),
/// (Spots, explicitly tagged as a garage)
Private(usize, bool),
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum BuildingType {
Residential {
num_residents: usize,
num_housing_units: usize,
},
/// An estimated number of residents, workers
ResidentialCommercial(usize, usize),
/// An estimated number of workers
Commercial(usize),
Empty,
}
impl BuildingType {
pub fn has_residents(&self) -> bool {
match self {
BuildingType::Residential { .. } | BuildingType::ResidentialCommercial(_, _) => true,
BuildingType::Commercial(_) | BuildingType::Empty => false,
}
}
}
/// None corresponds to the native name
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct NamePerLanguage(
#[serde(
serialize_with = "serialize_btreemap",
deserialize_with = "deserialize_btreemap"
)]
pub(crate) BTreeMap<Option<String>, String>,
);
impl NamePerLanguage {
pub fn get(&self, lang: Option<&String>) -> &String {
// TODO Can we avoid this clone?
let lang = lang.cloned();
if let Some(name) = self.0.get(&lang) { | return name;
}
&self.0[&None]
}
pub fn new(tags: &Tags) -> Option<NamePerLanguage> {
let native_name = tags.get(osm::NAME)?;
let mut map = BTreeMap::new();
map.insert(None, native_name.to_string());
for (k, v) in tags.inner() {
if let Some(lang) = k.strip_prefix("name:") {
map.insert(Some(lang.to_string()), v.to_string());
}
}
Some(NamePerLanguage(map))
}
pub fn unnamed() -> NamePerLanguage {
let mut map = BTreeMap::new();
map.insert(None, "unnamed".to_string());
NamePerLanguage(map)
}
}
impl Building {
pub fn sidewalk(&self) -> LaneID {
self.sidewalk_pos.lane()
}
pub fn house_number(&self) -> Option<String> {
let num = self.address.split(' ').next().unwrap();
if num != "???" {
Some(num.to_string())
} else {
None
}
}
/// The polyline goes from the building to the driving position
// TODO Make this handle parking_blackhole
pub fn driving_connection(&self, map: &Map) -> Option<(Position, PolyLine)> {
let lane = map
.get_parent(self.sidewalk())
.find_closest_lane(self.sidewalk(), |l| PathConstraints::Car.can_use(l, map))?;
// TODO Do we need to insist on this buffer, now that we can make cars gradually appear?
let pos = self
.sidewalk_pos
.equiv_pos(lane, map)
.buffer_dist(Distance::meters(7.0), map)?;
Some((pos, self.driveway_geom.clone().optionally_push(pos.pt(map))))
}
/// Returns (biking position, sidewalk position). Could fail if the biking graph is
/// disconnected.
pub fn biking_connection(&self, map: &Map) -> Option<(Position, Position)> {
// Easy case: the building is directly next to a usable lane
if let Some(pair) = sidewalk_to_bike(self.sidewalk_pos, map) {
return Some(pair);
}
// Floodfill the sidewalk graph until we find a sidewalk<->bike connection.
let mut queue: VecDeque<LaneID> = VecDeque::new();
let mut visited: HashSet<LaneID> = HashSet::new();
queue.push_back(self.sidewalk());
loop {
if queue.is_empty() {
return None;
}
let l = queue.pop_front().unwrap();
if visited.contains(&l) {
continue;
}
visited.insert(l);
// TODO Could search by sidewalk endpoint
if let Some(pair) = sidewalk_to_bike(Position::new(l, map.get_l(l).length() / 2.0), map)
{
return Some(pair);
}
for t in map.get_turns_from_lane(l) {
if !visited.contains(&t.id.dst) {
queue.push_back(t.id.dst);
}
}
}
}
pub fn num_parking_spots(&self) -> usize {
match self.parking {
OffstreetParking::PublicGarage(_, n) => n,
OffstreetParking::Private(n, _) => n,
}
}
/// Does this building contain any amenity matching the category?
pub fn has_amenity(&self, category: AmenityType) -> bool {
for amenity in &self.amenities {
if AmenityType::categorize(&amenity.amenity_type) == Some(category) {
return true;
}
}
false
}
}
fn sidewalk_to_bike(sidewalk_pos: Position, map: &Map) -> Option<(Position, Position)> {
let lane = map
.get_parent(sidewalk_pos.lane())
.find_closest_lane(sidewalk_pos.lane(), |l| {
!l.biking_blackhole && PathConstraints::Bike.can_use(l, map)
})?;
// No buffer needed
Some((sidewalk_pos.equiv_pos(lane, map), sidewalk_pos))
}
/// Businesses are categorized into one of these types.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, EnumString, Display, EnumIter)]
pub enum AmenityType {
Bank,
Bar,
Beauty,
Bike,
Cafe,
CarRepair,
CarShare,
Childcare,
ConvenienceStore,
Culture,
Exercise,
FastFood,
Food,
GreenSpace,
Hotel,
Laundry,
Library,
Medical,
Pet,
Playground,
Pool,
PostOffice,
Religious,
School,
Shopping,
Supermarket,
Tourism,
University,
}
impl AmenityType {
fn types(self) -> Vec<&'static str> {
match self {
AmenityType::Bank => vec!["bank"],
AmenityType::Bar => vec!["bar", "pub", "nightclub", "biergarten"],
AmenityType::Beauty => vec!["hairdresser", "beauty", "chemist", "cosmetics"],
AmenityType::Bike => vec!["bicycle"],
AmenityType::Cafe => vec!["cafe", "pastry", "coffee", "tea", "bakery"],
AmenityType::CarRepair => vec!["car_repair"],
AmenityType::CarShare => vec!["car_sharing"],
AmenityType::Childcare => vec!["childcare", "kindergarten"],
AmenityType::ConvenienceStore => vec!["convenience"],
AmenityType::Culture => vec!["arts_centre", "art", "cinema", "theatre"],
AmenityType::Exercise => vec!["fitness_centre", "sports_centre", "track", "pitch"],
AmenityType::FastFood => vec!["fast_food", "food_court"],
AmenityType::Food => vec![
"restaurant",
"farm",
"ice_cream",
"seafood",
"cheese",
"chocolate",
"deli",
"butcher",
"confectionery",
"beverages",
"alcohol",
],
AmenityType::GreenSpace => vec!["park", "garden", "nature_reserve"],
AmenityType::Hotel => vec!["hotel", "hostel", "guest_house", "motel"],
AmenityType::Laundry => vec!["dry_cleaning", "laundry", "tailor"],
AmenityType::Library => vec!["library"],
AmenityType::Medical => vec![
"clinic", "dentist", "hospital", "pharmacy", "doctors", "optician",
],
AmenityType::Pet => vec!["veterinary", "pet", "animal_boarding", "pet_grooming"],
AmenityType::Playground => vec!["playground"],
AmenityType::Pool => vec!["swimming_pool"],
AmenityType::PostOffice => vec!["post_office"],
AmenityType::Religious => vec!["place_of_worship", "religion"],
AmenityType::School => vec!["school"],
AmenityType::Shopping => vec![
"wholesale",
"bag",
"marketplace",
"second_hand",
"charity",
"clothes",
"lottery",
"shoes",
"mall",
"department_store",
"car",
"tailor",
"nutrition_supplements",
"watches",
"craft",
"fabric",
"kiosk",
"antiques",
"shoemaker",
"hardware",
"houseware",
"mobile_phone",
"photo",
"toys",
"bed",
"florist",
"electronics",
"fishing",
"garden_centre",
"frame",
"watchmaker",
"boutique",
"mobile_phone",
"party",
"car_parts",
"video",
"video_games",
"musical_instrument",
"music",
"baby_goods",
"doityourself",
"jewelry",
"variety_store",
"gift",
"carpet",
"perfumery",
"curtain",
"appliance",
"furniture",
"lighting",
"sewing",
"books",
"sports",
"travel_agency",
"interior_decoration",
"stationery",
"computer",
"tyres",
"newsagent",
"general",
],
AmenityType::Supermarket => vec!["supermarket", "greengrocer"],
AmenityType::Tourism => vec![
"gallery",
"museum",
"zoo",
"attraction",
"theme_park",
"aquarium",
],
AmenityType::University => vec!["college", "university"],
}
}
/// All types of amenities, in alphabetical order.
pub fn all() -> Vec<AmenityType> {
AmenityType::iter().collect()
}
/// Categorize an OSM amenity tag.
pub fn categorize(a: &str) -> Option<AmenityType> {
for at in AmenityType::all() {
if at.types().contains(&a) {
return Some(at);
}
}
None
}
} | random_line_split |
|
building.rs | use std::collections::{BTreeMap, HashSet, VecDeque};
use std::fmt;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::{Display, EnumIter, EnumString};
use abstutil::{
deserialize_btreemap, deserialize_usize, serialize_btreemap, serialize_usize, Tags,
};
use geom::{Distance, PolyLine, Polygon, Pt2D};
use crate::{osm, LaneID, Map, PathConstraints, Position};
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct BuildingID(
#[serde(
serialize_with = "serialize_usize",
deserialize_with = "deserialize_usize"
)]
pub usize,
);
impl fmt::Display for BuildingID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Building #{}", self.0)
}
}
/// A building has connections to the road and sidewalk, may contain commercial amenities, and
/// may have off-street parking.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Building {
pub id: BuildingID,
pub polygon: Polygon,
pub levels: f64,
pub address: String,
pub name: Option<NamePerLanguage>,
pub orig_id: osm::OsmID,
/// Where a text label should be centered to have the best chances of being contained within
/// the polygon.
pub label_center: Pt2D,
pub amenities: Vec<Amenity>,
pub bldg_type: BuildingType,
pub parking: OffstreetParking,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
/// The building's connection for any agent can change based on map edits. Just store the one
/// for pedestrians and lazily calculate the others.
pub sidewalk_pos: Position,
/// Goes from building to sidewalk
pub driveway_geom: PolyLine,
}
/// A business located inside a building.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Amenity {
pub names: NamePerLanguage,
/// This is the specific amenity listed in OSM, not the more general `AmenityType` category.
pub amenity_type: String,
/// Depending on options while importing, these might be empty, to save file space.
pub osm_tags: Tags,
}
/// Represent no parking as Private(0, false).
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum OffstreetParking {
/// (Name, spots)
PublicGarage(String, usize),
/// (Spots, explicitly tagged as a garage)
Private(usize, bool),
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum BuildingType {
Residential {
num_residents: usize,
num_housing_units: usize,
},
/// An estimated number of residents, workers
ResidentialCommercial(usize, usize),
/// An estimated number of workers
Commercial(usize),
Empty,
}
impl BuildingType {
pub fn has_residents(&self) -> bool {
match self {
BuildingType::Residential { .. } | BuildingType::ResidentialCommercial(_, _) => true,
BuildingType::Commercial(_) | BuildingType::Empty => false,
}
}
}
/// None corresponds to the native name
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct NamePerLanguage(
#[serde(
serialize_with = "serialize_btreemap",
deserialize_with = "deserialize_btreemap"
)]
pub(crate) BTreeMap<Option<String>, String>,
);
impl NamePerLanguage {
pub fn get(&self, lang: Option<&String>) -> &String {
// TODO Can we avoid this clone?
let lang = lang.cloned();
if let Some(name) = self.0.get(&lang) {
return name;
}
&self.0[&None]
}
pub fn new(tags: &Tags) -> Option<NamePerLanguage> {
let native_name = tags.get(osm::NAME)?;
let mut map = BTreeMap::new();
map.insert(None, native_name.to_string());
for (k, v) in tags.inner() {
if let Some(lang) = k.strip_prefix("name:") {
map.insert(Some(lang.to_string()), v.to_string());
}
}
Some(NamePerLanguage(map))
}
pub fn unnamed() -> NamePerLanguage {
let mut map = BTreeMap::new();
map.insert(None, "unnamed".to_string());
NamePerLanguage(map)
}
}
impl Building {
pub fn sidewalk(&self) -> LaneID {
self.sidewalk_pos.lane()
}
pub fn house_number(&self) -> Option<String> {
let num = self.address.split(' ').next().unwrap();
if num != "???" {
Some(num.to_string())
} else {
None
}
}
/// The polyline goes from the building to the driving position
// TODO Make this handle parking_blackhole
pub fn driving_connection(&self, map: &Map) -> Option<(Position, PolyLine)> {
let lane = map
.get_parent(self.sidewalk())
.find_closest_lane(self.sidewalk(), |l| PathConstraints::Car.can_use(l, map))?;
// TODO Do we need to insist on this buffer, now that we can make cars gradually appear?
let pos = self
.sidewalk_pos
.equiv_pos(lane, map)
.buffer_dist(Distance::meters(7.0), map)?;
Some((pos, self.driveway_geom.clone().optionally_push(pos.pt(map))))
}
/// Returns (biking position, sidewalk position). Could fail if the biking graph is
/// disconnected.
pub fn biking_connection(&self, map: &Map) -> Option<(Position, Position)> {
// Easy case: the building is directly next to a usable lane
if let Some(pair) = sidewalk_to_bike(self.sidewalk_pos, map) {
return Some(pair);
}
// Floodfill the sidewalk graph until we find a sidewalk<->bike connection.
let mut queue: VecDeque<LaneID> = VecDeque::new();
let mut visited: HashSet<LaneID> = HashSet::new();
queue.push_back(self.sidewalk());
loop {
if queue.is_empty() {
return None;
}
let l = queue.pop_front().unwrap();
if visited.contains(&l) {
continue;
}
visited.insert(l);
// TODO Could search by sidewalk endpoint
if let Some(pair) = sidewalk_to_bike(Position::new(l, map.get_l(l).length() / 2.0), map)
{
return Some(pair);
}
for t in map.get_turns_from_lane(l) {
if !visited.contains(&t.id.dst) {
queue.push_back(t.id.dst);
}
}
}
}
pub fn num_parking_spots(&self) -> usize {
match self.parking {
OffstreetParking::PublicGarage(_, n) => n,
OffstreetParking::Private(n, _) => n,
}
}
/// Does this building contain any amenity matching the category?
pub fn has_amenity(&self, category: AmenityType) -> bool {
for amenity in &self.amenities {
if AmenityType::categorize(&amenity.amenity_type) == Some(category) |
}
false
}
}
fn sidewalk_to_bike(sidewalk_pos: Position, map: &Map) -> Option<(Position, Position)> {
let lane = map
.get_parent(sidewalk_pos.lane())
.find_closest_lane(sidewalk_pos.lane(), |l| {
!l.biking_blackhole && PathConstraints::Bike.can_use(l, map)
})?;
// No buffer needed
Some((sidewalk_pos.equiv_pos(lane, map), sidewalk_pos))
}
/// Businesses are categorized into one of these types.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, EnumString, Display, EnumIter)]
pub enum AmenityType {
Bank,
Bar,
Beauty,
Bike,
Cafe,
CarRepair,
CarShare,
Childcare,
ConvenienceStore,
Culture,
Exercise,
FastFood,
Food,
GreenSpace,
Hotel,
Laundry,
Library,
Medical,
Pet,
Playground,
Pool,
PostOffice,
Religious,
School,
Shopping,
Supermarket,
Tourism,
University,
}
impl AmenityType {
fn types(self) -> Vec<&'static str> {
match self {
AmenityType::Bank => vec!["bank"],
AmenityType::Bar => vec!["bar", "pub", "nightclub", "biergarten"],
AmenityType::Beauty => vec!["hairdresser", "beauty", "chemist", "cosmetics"],
AmenityType::Bike => vec!["bicycle"],
AmenityType::Cafe => vec!["cafe", "pastry", "coffee", "tea", "bakery"],
AmenityType::CarRepair => vec!["car_repair"],
AmenityType::CarShare => vec!["car_sharing"],
AmenityType::Childcare => vec!["childcare", "kindergarten"],
AmenityType::ConvenienceStore => vec!["convenience"],
AmenityType::Culture => vec!["arts_centre", "art", "cinema", "theatre"],
AmenityType::Exercise => vec!["fitness_centre", "sports_centre", "track", "pitch"],
AmenityType::FastFood => vec!["fast_food", "food_court"],
AmenityType::Food => vec![
"restaurant",
"farm",
"ice_cream",
"seafood",
"cheese",
"chocolate",
"deli",
"butcher",
"confectionery",
"beverages",
"alcohol",
],
AmenityType::GreenSpace => vec!["park", "garden", "nature_reserve"],
AmenityType::Hotel => vec!["hotel", "hostel", "guest_house", "motel"],
AmenityType::Laundry => vec!["dry_cleaning", "laundry", "tailor"],
AmenityType::Library => vec!["library"],
AmenityType::Medical => vec![
"clinic", "dentist", "hospital", "pharmacy", "doctors", "optician",
],
AmenityType::Pet => vec!["veterinary", "pet", "animal_boarding", "pet_grooming"],
AmenityType::Playground => vec!["playground"],
AmenityType::Pool => vec!["swimming_pool"],
AmenityType::PostOffice => vec!["post_office"],
AmenityType::Religious => vec!["place_of_worship", "religion"],
AmenityType::School => vec!["school"],
AmenityType::Shopping => vec![
"wholesale",
"bag",
"marketplace",
"second_hand",
"charity",
"clothes",
"lottery",
"shoes",
"mall",
"department_store",
"car",
"tailor",
"nutrition_supplements",
"watches",
"craft",
"fabric",
"kiosk",
"antiques",
"shoemaker",
"hardware",
"houseware",
"mobile_phone",
"photo",
"toys",
"bed",
"florist",
"electronics",
"fishing",
"garden_centre",
"frame",
"watchmaker",
"boutique",
"mobile_phone",
"party",
"car_parts",
"video",
"video_games",
"musical_instrument",
"music",
"baby_goods",
"doityourself",
"jewelry",
"variety_store",
"gift",
"carpet",
"perfumery",
"curtain",
"appliance",
"furniture",
"lighting",
"sewing",
"books",
"sports",
"travel_agency",
"interior_decoration",
"stationery",
"computer",
"tyres",
"newsagent",
"general",
],
AmenityType::Supermarket => vec!["supermarket", "greengrocer"],
AmenityType::Tourism => vec![
"gallery",
"museum",
"zoo",
"attraction",
"theme_park",
"aquarium",
],
AmenityType::University => vec!["college", "university"],
}
}
/// All types of amenities, in alphabetical order.
pub fn all() -> Vec<AmenityType> {
AmenityType::iter().collect()
}
/// Categorize an OSM amenity tag.
pub fn categorize(a: &str) -> Option<AmenityType> {
for at in AmenityType::all() {
if at.types().contains(&a) {
return Some(at);
}
}
None
}
}
| {
return true;
} | conditional_block |
types.rs | use super::parser::{Expr, Literal, Pattern};
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Type {
Int64,
Int32,
Float,
Dataset(im::HashMap<String, Type>),
/// T -> U
TyArr(Box<Type>, Box<Type>),
/// Type variable
TyVar(String),
// Data type
TyCon(String),
}
static COUNTER: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(1);
fn get_id() -> usize {
COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed)
}
fn get_item_type<'a>(
items: &[Expr],
env: &im::HashMap<String, Scheme>,
) -> Result<TypeRes<'a>, String> {
let mut ty = Type::TyVar(get_id().to_string());
for x in items {
let (_subs, ty2) = x.get_type(env)?;
let subs = unify(&ty, &ty2)?;
ty = apply_sub_type(&subs, &ty);
}
Ok((im::HashMap::new(), ty))
}
type TypeRes<'a> = (im::HashMap<String, Type>, Type);
pub type Scheme = (im::HashSet<String>, Type);
type Subs<'a> = &'a im::HashMap<String, Type>;
fn apply_sub_type(subs: Subs, ty: &Type) -> Type {
match ty {
Type::TyVar(name) => subs.get(name).unwrap_or_else(|| &ty).clone(),
Type::TyArr(t1, t2) => Type::TyArr(
Box::new(apply_sub_type(subs, t1)),
Box::new(apply_sub_type(subs, t2)),
),
_ => ty.clone(),
}
}
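// Added illustrative test (not from the original source): applying the
// substitution {x -> Int64} to the arrow type x -> x rewrites both
// occurrences of the variable and leaves concrete types untouched.
#[test]
fn apply_sub_type_example() {
    let subs = im::HashMap::new().update("x".to_string(), Type::Int64);
    let arrow = Type::TyArr(
        Box::new(Type::TyVar("x".to_string())),
        Box::new(Type::TyVar("x".to_string())),
    );
    let expected = Type::TyArr(Box::new(Type::Int64), Box::new(Type::Int64));
    assert_eq!(apply_sub_type(&subs, &arrow), expected);
}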
fn apply_sub_scheme(subs: Subs, scheme: Scheme) -> Scheme {
let mut subs1 = subs.clone();
for key in scheme.0.iter() {
subs1 = subs1.without(key);
}
let ty = apply_sub_type(&subs1, &scheme.1);
(scheme.0, ty)
}
fn apply_sub_env(
subs: &im::HashMap<String, Type>,
env: &im::HashMap<String, Scheme>,
) -> im::HashMap<String, Scheme> {
let mut h = im::HashMap::new();
for (key, value) in env.into_iter() {
h = h.update(key.to_string(), apply_sub_scheme(subs, value.clone()));
}
h
}
fn | (subs: Subs, subs2: Subs) -> im::HashMap<String, Type> {
let mut h = im::HashMap::new();
for (key, value) in subs.into_iter() {
h = h.update(key.to_string(), apply_sub_type(subs, &value.clone()));
}
h.union(subs2.clone())
}
fn ftv_ty(ty: &Type) -> im::HashSet<String> {
match ty {
Type::TyVar(a) => im::HashSet::unit(a.clone()),
Type::TyArr(ty1, ty2) => {
let x = ftv_ty(ty1);
let y = ftv_ty(ty2);
x.union(y)
}
_ => im::HashSet::new(),
}
}
fn ftv_env(env: &im::HashMap<String, Scheme>) -> im::HashSet<String> {
let ftvs = env.values().map(|x| ftv_ty(&x.1));
im::HashSet::unions(ftvs)
}
fn generalize(env: &im::HashMap<String, Scheme>, ty: &Type) -> Scheme {
let xs = ftv_ty(ty);
let ys = ftv_env(env);
let a = xs.difference(ys);
(a, ty.clone())
}
fn unify(ty1: &Type, ty2: &Type) -> Result<im::HashMap<String, Type>, String> {
match (ty1, ty2) {
(Type::TyArr(l, r), Type::TyArr(l1, r1)) => {
let s1 = unify(l, l1)?;
let s2 = unify(&apply_sub_type(&s1, &r), &apply_sub_type(&s1, &r1))?;
Ok(compose(&s2, &s1))
}
(Type::TyVar(a), t) => bind(&a, &t),
(t, Type::TyVar(a)) => bind(&a, &t),
(t1, t2) => {
if t1 == t2 {
Ok(im::HashMap::new())
} else {
Err("UnificationFail".to_string())
}
}
}
}
fn bind(var: &str, ty: &Type) -> Result<im::HashMap<String, Type>, String> {
if let Type::TyVar(x) = ty {
if var == x {
return Ok(im::HashMap::new());
}
}
if ftv_ty(ty).contains(var) {
return Err("Infinite Type".to_string());
}
Ok(im::HashMap::new().update(var.to_string(), ty.clone()))
}
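// Added illustrative test: binding a variable to a type that mentions the
// same variable (e.g. a ~ a -> a) would build an infinite type, so bind
// rejects it via the occurs check; binding to an unrelated type succeeds.
#[test]
fn bind_occurs_check_example() {
    let subs = bind("a", &Type::Int64).unwrap();
    assert_eq!(subs.get("a"), Some(&Type::Int64));
    let arrow = Type::TyArr(
        Box::new(Type::TyVar("a".to_string())),
        Box::new(Type::TyVar("a".to_string())),
    );
    assert!(bind("a", &arrow).is_err());
}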
fn type_pat(
env: &im::HashMap<String, Scheme>,
case_type: &Type,
pattern: &Pattern,
) -> Result<im::HashMap<String, Type>, String> {
// todo vars / wildcards, etc
let (_s, ty) = env.get(pattern.name).unwrap();
unify(case_type, ty)
}
/// Converts inner type of dataset
fn convert_inner(
env: &im::HashMap<String, Scheme>,
key: &str,
items: &[Expr],
) -> Result<(String, Type), String> {
let (_s, ty) = get_item_type(items, env)?;
Ok((key.to_string(), ty))
}
// Type inference using http://dev.stephendiehl.com/fun/006_hindley_milner.html#substitution
impl<'a> Expr<'a> {
pub fn get_type(&self, env: &im::HashMap<String, Scheme>) -> Result<TypeRes, String> {
match self {
Expr::Literal(l) => Ok((im::HashMap::new(), l.get_type())),
Expr::Ref(x) => {
let err = format!("Could not find reference {}", x);
let ty = env.get(*x).cloned().ok_or(err)?;
Ok((im::HashMap::new(), ty.1))
}
Expr::LetIn(x) => {
let (s1, t1) = x.expr1.expr.get_type(env)?;
let env1 = apply_sub_env(&s1, env);
let t2 = generalize(&env1, &t1);
let extended_ty = env.update(x.name.to_string(), t2);
let (s2, t2) = x.expr2.expr.get_type(&extended_ty)?;
Ok((compose(&s1, &s2), t2))
}
Expr::DataSet(items) => {
let d: im::HashMap<String, Type> = items
.iter()
.map(|(k, items)| convert_inner(env, k, items))
.flatten()
.collect();
if d.len() == items.len() {
Ok((im::HashMap::new(), Type::Dataset(d)))
} else {
Err("Not all rows matched in type".to_string())
}
}
Expr::Lambda(name, expr) => {
let type_var = Type::TyVar(get_id().to_string()); // fresh type variable
let env1 = env.update((*name).to_string(), (im::HashSet::new(), type_var.clone()));
let (sub, t1) = expr.expr.get_type(&env1)?;
let substituted = apply_sub_type(&sub, &type_var);
Ok((sub, Type::TyArr(Box::new(substituted), Box::new(t1))))
}
Expr::App(expr1, expr2) => {
let tv = Type::TyVar(get_id().to_string());
let (s1, t1) = expr1.get_type(env)?;
let (s2, t2) = expr2.get_type(&apply_sub_env(&s1, env))?;
let s3 = unify(
&apply_sub_type(&s2, &t1),
&Type::TyArr(Box::new(t2), Box::new(tv.clone())),
)?;
Ok((compose(&compose(&s3, &s2), &s1), apply_sub_type(&s3, &tv)))
}
Expr::Match(expr, exprs) => {
let (mut subs, case_type) = expr.get_type(env)?;
let mut branch_type = Type::TyVar(get_id().to_string());
for (p, branch) in exprs {
// TODO check, test
let pat_sub = type_pat(env, &case_type, p)?;
subs = compose(&subs, &pat_sub);
let (s, n_branch_type) = branch.get_type(env)?;
subs = compose(&subs, &s);
let cur_branch_type = apply_sub_type(&subs, &n_branch_type);
let s2 = unify(&branch_type, &cur_branch_type)?;
subs = compose(&subs, &s2);
branch_type = apply_sub_type(&subs, &branch_type);
}
Ok((subs, branch_type))
}
Expr::Projection(names, expr) => {
let from_ty = expr.get_type(env)?;
match from_ty {
(_s, Type::Dataset(items)) => {
if names
.iter()
.filter(|x| !items.contains_key(&x.to_string()))
.count()
> 0
{
// TODO: improve error
return Err("Not all fields in dataset".to_string());
}
Ok((
im::HashMap::new(),
Type::Dataset(
items
.iter()
.filter(|(k, _v)| names.contains(&&*k.to_string()))
.map(|(k, v)| (k.to_string(), v.clone()))
.collect(),
),
))
}
_ => Err("Expected dataset".to_string()),
}
}
x => Err(format!("not implemented {:?}", x)),
}
}
}
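// Added note on how the pieces compose (see the tests below): for
// `let id = \x -> x in id 1`, Lambda gives id the type a -> a, LetIn
// generalizes that scheme into the environment, and App unifies a -> a
// with Int64 -> t so the whole expression comes out as Int64.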
impl Literal {
fn get_type(&self) -> Type {
match self {
Literal::Int64(_) => Type::Int64,
Literal::Int32(_) => Type::Int32,
Literal::Float(_) => Type::Float,
}
}
}
#[cfg(test)]
use super::parser::{expression, Span};
#[test]
fn test_type() {
assert_eq!(Literal::Float(1.0).get_type(), Type::Float);
assert_eq!(Literal::Int64(1).get_type(), Type::Int64);
}
#[test]
fn test_type_let() {
let (_, expr) = expression(Span::new("let x = 1 in x")).unwrap();
assert_eq!(expr.get_type(&im::HashMap::new()).unwrap().1, Type::Int64);
}
#[test]
fn test_type_lam() {
let (_, expr) = expression(Span::new(r"\x -> x")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
match ty {
Type::TyArr(x, y) => assert_eq!(x, y),
_ => panic!("Did not expect non-tyarr result"),
}
}
#[test]
fn test_type_lam_app() {
let (_, expr) = expression(Span::new(r"let id = \x -> x in id 1")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(ty, Type::Int64);
}
#[test]
fn test_type_sql() {
let (_, expr) = expression(Span::new("let t = {a\n1} in select a from t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows_incompatible() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3.0} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new());
assert!(ty.is_err());
}
| compose | identifier_name |
types.rs | use super::parser::{Expr, Literal, Pattern};
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Type {
Int64,
Int32,
Float,
Dataset(im::HashMap<String, Type>),
/// T -> U
TyArr(Box<Type>, Box<Type>),
/// Type variable
TyVar(String),
// Data type
TyCon(String),
}
static COUNTER: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(1);
fn get_id() -> usize {
COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed)
}
fn get_item_type<'a>(
items: &[Expr],
env: &im::HashMap<String, Scheme>,
) -> Result<TypeRes<'a>, String> {
let mut ty = Type::TyVar(get_id().to_string());
for x in items {
let (_subs, ty2) = x.get_type(env)?;
let subs = unify(&ty, &ty2)?;
ty = apply_sub_type(&subs, &ty);
}
Ok((im::HashMap::new(), ty))
}
type TypeRes<'a> = (im::HashMap<String, Type>, Type);
pub type Scheme = (im::HashSet<String>, Type);
type Subs<'a> = &'a im::HashMap<String, Type>;
fn apply_sub_type(subs: Subs, ty: &Type) -> Type {
match ty {
Type::TyVar(name) => subs.get(name).unwrap_or_else(|| &ty).clone(),
Type::TyArr(t1, t2) => Type::TyArr(
Box::new(apply_sub_type(subs, t1)),
Box::new(apply_sub_type(subs, t2)),
),
_ => ty.clone(),
}
}
fn apply_sub_scheme(subs: Subs, scheme: Scheme) -> Scheme {
let mut subs1 = subs.clone();
for key in scheme.0.iter() {
subs1 = subs1.without(key);
}
let ty = apply_sub_type(&subs1, &scheme.1);
(scheme.0, ty)
}
fn apply_sub_env(
subs: &im::HashMap<String, Type>,
env: &im::HashMap<String, Scheme>,
) -> im::HashMap<String, Scheme> {
let mut h = im::HashMap::new();
for (key, value) in env.into_iter() {
h = h.update(key.to_string(), apply_sub_scheme(subs, value.clone()));
}
h
}
fn compose(subs: Subs, subs2: Subs) -> im::HashMap<String, Type> {
let mut h = im::HashMap::new();
for (key, value) in subs.into_iter() {
h = h.update(key.to_string(), apply_sub_type(subs, &value.clone()));
}
h.union(subs2.clone())
}
fn ftv_ty(ty: &Type) -> im::HashSet<String> {
match ty {
Type::TyVar(a) => im::HashSet::unit(a.clone()),
Type::TyArr(ty1, ty2) => {
let x = ftv_ty(ty1);
let y = ftv_ty(ty2);
x.union(y)
}
_ => im::HashSet::new(),
}
}
fn ftv_env(env: &im::HashMap<String, Scheme>) -> im::HashSet<String> {
let ftvs = env.values().map(|x| ftv_ty(&x.1));
im::HashSet::unions(ftvs)
}
fn generalize(env: &im::HashMap<String, Scheme>, ty: &Type) -> Scheme |
fn unify(ty1: &Type, ty2: &Type) -> Result<im::HashMap<String, Type>, String> {
match (ty1, ty2) {
(Type::TyArr(l, r), Type::TyArr(l1, r1)) => {
let s1 = unify(l, l1)?;
let s2 = unify(&apply_sub_type(&s1, &r), &apply_sub_type(&s1, &r1))?;
Ok(compose(&s2, &s1))
}
(Type::TyVar(a), t) => bind(&a, &t),
(t, Type::TyVar(a)) => bind(&a, &t),
(t1, t2) => {
if t1 == t2 {
Ok(im::HashMap::new())
} else {
Err("UnificationFail".to_string())
}
}
}
}
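// Added illustrative test: a type variable unifies with anything by
// substitution, equal concrete types unify with an empty substitution,
// and mismatched concrete types report UnificationFail.
#[test]
fn unify_examples() {
    let subs = unify(&Type::TyVar("a".to_string()), &Type::Int64).unwrap();
    assert_eq!(subs.get("a"), Some(&Type::Int64));
    assert!(unify(&Type::Int64, &Type::Int64).unwrap().is_empty());
    assert!(unify(&Type::Int64, &Type::Float).is_err());
}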
fn bind(var: &str, ty: &Type) -> Result<im::HashMap<String, Type>, String> {
if let Type::TyVar(x) = ty {
if var == x {
return Ok(im::HashMap::new());
}
}
if ftv_ty(ty).contains(var) {
return Err("Infinite Type".to_string());
}
Ok(im::HashMap::new().update(var.to_string(), ty.clone()))
}
fn type_pat(
env: &im::HashMap<String, Scheme>,
case_type: &Type,
pattern: &Pattern,
) -> Result<im::HashMap<String, Type>, String> {
// todo vars / wildcards, etc
let (_s, ty) = env.get(pattern.name).unwrap();
unify(case_type, ty)
}
/// Converts inner type of dataset
fn convert_inner(
env: &im::HashMap<String, Scheme>,
key: &str,
items: &[Expr],
) -> Result<(String, Type), String> {
let (_s, ty) = get_item_type(items, env)?;
Ok((key.to_string(), ty))
}
// Type inference using http://dev.stephendiehl.com/fun/006_hindley_milner.html#substitution
impl<'a> Expr<'a> {
pub fn get_type(&self, env: &im::HashMap<String, Scheme>) -> Result<TypeRes, String> {
match self {
Expr::Literal(l) => Ok((im::HashMap::new(), l.get_type())),
Expr::Ref(x) => {
let err = format!("Could not find reference {}", x);
let ty = env.get(*x).cloned().ok_or(err)?;
Ok((im::HashMap::new(), ty.1))
}
Expr::LetIn(x) => {
let (s1, t1) = x.expr1.expr.get_type(env)?;
let env1 = apply_sub_env(&s1, env);
let t2 = generalize(&env1, &t1);
let extended_ty = env.update(x.name.to_string(), t2);
let (s2, t2) = x.expr2.expr.get_type(&extended_ty)?;
Ok((compose(&s1, &s2), t2))
}
Expr::DataSet(items) => {
let d: im::HashMap<String, Type> = items
.iter()
.map(|(k, items)| convert_inner(env, k, items))
.flatten()
.collect();
if d.len() == items.len() {
Ok((im::HashMap::new(), Type::Dataset(d)))
} else {
Err("Not all rows matched in type".to_string())
}
}
Expr::Lambda(name, expr) => {
let type_var = Type::TyVar(get_id().to_string()); // fresh type variable
let env1 = env.update((*name).to_string(), (im::HashSet::new(), type_var.clone()));
let (sub, t1) = expr.expr.get_type(&env1)?;
let substituted = apply_sub_type(&sub, &type_var);
Ok((sub, Type::TyArr(Box::new(substituted), Box::new(t1))))
}
Expr::App(expr1, expr2) => {
let tv = Type::TyVar(get_id().to_string());
let (s1, t1) = expr1.get_type(env)?;
let (s2, t2) = expr2.get_type(&apply_sub_env(&s1, env))?;
let s3 = unify(
&apply_sub_type(&s2, &t1),
&Type::TyArr(Box::new(t2), Box::new(tv.clone())),
)?;
Ok((compose(&compose(&s3, &s2), &s1), apply_sub_type(&s3, &tv)))
}
Expr::Match(expr, exprs) => {
let (mut subs, case_type) = expr.get_type(env)?;
let mut branch_type = Type::TyVar(get_id().to_string());
for (p, branch) in exprs {
// TODO check, test
let pat_sub = type_pat(env, &case_type, p)?;
subs = compose(&subs, &pat_sub);
let (s, n_branch_type) = branch.get_type(env)?;
subs = compose(&subs, &s);
let cur_branch_type = apply_sub_type(&subs, &n_branch_type);
let s2 = unify(&branch_type, &cur_branch_type)?;
subs = compose(&subs, &s2);
branch_type = apply_sub_type(&subs, &branch_type);
}
Ok((subs, branch_type))
}
Expr::Projection(names, expr) => {
let from_ty = expr.get_type(env)?;
match from_ty {
(_s, Type::Dataset(items)) => {
if names
.iter()
.filter(|x| !items.contains_key(&x.to_string()))
.count()
> 0
{
// TODO: improve error
return Err("Not all fields in dataset".to_string());
}
Ok((
im::HashMap::new(),
Type::Dataset(
items
.iter()
.filter(|(k, _v)| names.contains(&&*k.to_string()))
.map(|(k, v)| (k.to_string(), v.clone()))
.collect(),
),
))
}
_ => Err("Expected dataset".to_string()),
}
}
x => Err(format!("not implemented {:?}", x)),
}
}
}
impl Literal {
fn get_type(&self) -> Type {
match self {
Literal::Int64(_) => Type::Int64,
Literal::Int32(_) => Type::Int32,
Literal::Float(_) => Type::Float,
}
}
}
#[cfg(test)]
use super::parser::{expression, Span};
#[test]
fn test_type() {
assert_eq!(Literal::Float(1.0).get_type(), Type::Float);
assert_eq!(Literal::Int64(1).get_type(), Type::Int64);
}
#[test]
fn test_type_let() {
let (_, expr) = expression(Span::new("let x = 1 in x")).unwrap();
assert_eq!(expr.get_type(&im::HashMap::new()).unwrap().1, Type::Int64);
}
#[test]
fn test_type_lam() {
let (_, expr) = expression(Span::new(r"\x -> x")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
match ty {
Type::TyArr(x, y) => assert_eq!(x, y),
_ => panic!("Did not expect non-tyarr result"),
}
}
#[test]
fn test_type_lam_app() {
let (_, expr) = expression(Span::new(r"let id = \x -> x in id 1")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(ty, Type::Int64);
}
#[test]
fn test_type_sql() {
let (_, expr) = expression(Span::new("let t = {a\n1} in select a from t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows_incompatible() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3.0} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new());
assert!(ty.is_err());
}
| {
let xs = ftv_ty(ty);
let ys = ftv_env(env);
let a = xs.difference(ys);
(a, ty.clone())
} | identifier_body |
types.rs | use super::parser::{Expr, Literal, Pattern};
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Type {
Int64,
Int32,
Float,
Dataset(im::HashMap<String, Type>),
/// T -> U
TyArr(Box<Type>, Box<Type>),
/// Type variable
TyVar(String),
// Data type
TyCon(String),
}
static COUNTER: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(1);
fn get_id() -> usize {
COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed)
}
fn get_item_type<'a>(
items: &[Expr],
env: &im::HashMap<String, Scheme>,
) -> Result<TypeRes<'a>, String> {
let mut ty = Type::TyVar(get_id().to_string());
for x in items {
let (_subs, ty2) = x.get_type(env)?;
let subs = unify(&ty, &ty2)?;
ty = apply_sub_type(&subs, &ty);
}
Ok((im::HashMap::new(), ty))
}
type TypeRes<'a> = (im::HashMap<String, Type>, Type);
pub type Scheme = (im::HashSet<String>, Type);
type Subs<'a> = &'a im::HashMap<String, Type>;
fn apply_sub_type(subs: Subs, ty: &Type) -> Type {
match ty {
Type::TyVar(name) => subs.get(name).unwrap_or_else(|| &ty).clone(),
Type::TyArr(t1, t2) => Type::TyArr(
Box::new(apply_sub_type(subs, t1)),
Box::new(apply_sub_type(subs, t2)),
),
_ => ty.clone(),
}
}
fn apply_sub_scheme(subs: Subs, scheme: Scheme) -> Scheme {
let mut subs1 = subs.clone();
for key in scheme.0.iter() {
subs1 = subs1.without(key);
}
let ty = apply_sub_type(&subs1, &scheme.1);
(scheme.0, ty)
}
fn apply_sub_env(
subs: &im::HashMap<String, Type>,
env: &im::HashMap<String, Scheme>,
) -> im::HashMap<String, Scheme> {
let mut h = im::HashMap::new();
for (key, value) in env.into_iter() {
h = h.update(key.to_string(), apply_sub_scheme(subs, value.clone()));
}
h
}
fn compose(subs: Subs, subs2: Subs) -> im::HashMap<String, Type> {
let mut h = im::HashMap::new();
for (key, value) in subs.into_iter() {
h = h.update(key.to_string(), apply_sub_type(subs, &value.clone()));
}
h.union(subs2.clone())
}
fn ftv_ty(ty: &Type) -> im::HashSet<String> {
match ty {
Type::TyVar(a) => im::HashSet::unit(a.clone()),
Type::TyArr(ty1, ty2) => {
let x = ftv_ty(ty1);
let y = ftv_ty(ty2);
x.union(y)
}
_ => im::HashSet::new(),
}
}
fn ftv_env(env: &im::HashMap<String, Scheme>) -> im::HashSet<String> {
let ftvs = env.values().map(|x| ftv_ty(&x.1));
im::HashSet::unions(ftvs)
}
fn generalize(env: &im::HashMap<String, Scheme>, ty: &Type) -> Scheme {
let xs = ftv_ty(ty);
let ys = ftv_env(env);
let a = xs.difference(ys);
(a, ty.clone())
}
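// Added illustrative test: generalization quantifies exactly the type
// variables that are free in the type but not free in the environment.
#[test]
fn generalize_example() {
    let arrow = Type::TyArr(
        Box::new(Type::TyVar("a".to_string())),
        Box::new(Type::TyVar("a".to_string())),
    );
    let (quantified, ty) = generalize(&im::HashMap::new(), &arrow);
    assert!(quantified.contains("a"));
    assert_eq!(ty, arrow);
}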
fn unify(ty1: &Type, ty2: &Type) -> Result<im::HashMap<String, Type>, String> {
match (ty1, ty2) {
(Type::TyArr(l, r), Type::TyArr(l1, r1)) => {
let s1 = unify(l, l1)?;
let s2 = unify(&apply_sub_type(&s1, &r), &apply_sub_type(&s1, &r1))?;
Ok(compose(&s2, &s1))
}
(Type::TyVar(a), t) => bind(&a, &t),
(t, Type::TyVar(a)) => bind(&a, &t),
(t1, t2) => {
if t1 == t2 {
Ok(im::HashMap::new())
} else {
Err("UnificationFail".to_string())
}
}
}
}
fn bind(var: &str, ty: &Type) -> Result<im::HashMap<String, Type>, String> {
if let Type::TyVar(x) = ty {
if var == x {
return Ok(im::HashMap::new());
}
}
if ftv_ty(ty).contains(var) {
return Err("Infinite Type".to_string());
}
Ok(im::HashMap::new().update(var.to_string(), ty.clone()))
}
fn type_pat(
env: &im::HashMap<String, Scheme>,
case_type: &Type,
pattern: &Pattern,
) -> Result<im::HashMap<String, Type>, String> {
// todo vars / wildcards, etc
let (_s, ty) = env.get(pattern.name).unwrap();
unify(case_type, ty)
}
/// Converts inner type of dataset
fn convert_inner(
env: &im::HashMap<String, Scheme>,
key: &str,
items: &[Expr],
) -> Result<(String, Type), String> {
let (_s, ty) = get_item_type(items, env)?;
Ok((key.to_string(), ty))
}
// Type inference using http://dev.stephendiehl.com/fun/006_hindley_milner.html#substitution
impl<'a> Expr<'a> {
pub fn get_type(&self, env: &im::HashMap<String, Scheme>) -> Result<TypeRes, String> {
match self {
Expr::Literal(l) => Ok((im::HashMap::new(), l.get_type())),
Expr::Ref(x) => {
let err = format!("Could not find reference {}", x);
let ty = env.get(*x).cloned().ok_or(err)?;
Ok((im::HashMap::new(), ty.1))
}
Expr::LetIn(x) => {
let (s1, t1) = x.expr1.expr.get_type(env)?;
let env1 = apply_sub_env(&s1, env);
let t2 = generalize(&env1, &t1);
let extended_ty = env.update(x.name.to_string(), t2);
let (s2, t2) = x.expr2.expr.get_type(&extended_ty)?;
Ok((compose(&s1, &s2), t2))
}
Expr::DataSet(items) => { | .iter()
.map(|(k, items)| convert_inner(env, k, items))
.flatten()
.collect();
if d.len() == items.len() {
Ok((im::HashMap::new(), Type::Dataset(d)))
} else {
Err("Not all rows matched in type".to_string())
}
}
Expr::Lambda(name, expr) => {
let type_var = Type::TyVar(get_id().to_string()); // fresh type variable
let env1 = env.update((*name).to_string(), (im::HashSet::new(), type_var.clone()));
let (sub, t1) = expr.expr.get_type(&env1)?;
let substituted = apply_sub_type(&sub, &type_var);
Ok((sub, Type::TyArr(Box::new(substituted), Box::new(t1))))
}
Expr::App(expr1, expr2) => {
let tv = Type::TyVar(get_id().to_string());
let (s1, t1) = expr1.get_type(env)?;
let (s2, t2) = expr2.get_type(&apply_sub_env(&s1, env))?;
let s3 = unify(
&apply_sub_type(&s2, &t1),
&Type::TyArr(Box::new(t2), Box::new(tv.clone())),
)?;
Ok((compose(&compose(&s3, &s2), &s1), apply_sub_type(&s3, &tv)))
}
Expr::Match(expr, exprs) => {
let (mut subs, case_type) = expr.get_type(env)?;
let mut branch_type = Type::TyVar(get_id().to_string());
for (p, branch) in exprs {
// TODO check, test
let pat_sub = type_pat(env, &case_type, p)?;
subs = compose(&subs, &pat_sub);
let (s, n_branch_type) = branch.get_type(env)?;
subs = compose(&subs, &s);
let cur_branch_type = apply_sub_type(&subs, &n_branch_type);
let s2 = unify(&branch_type, &cur_branch_type)?;
subs = compose(&subs, &s2);
branch_type = apply_sub_type(&subs, &branch_type);
}
Ok((subs, branch_type))
}
Expr::Projection(names, expr) => {
let from_ty = expr.get_type(env)?;
match from_ty {
(_s, Type::Dataset(items)) => {
if names
.iter()
.filter(|x| !items.contains_key(&x.to_string()))
.count()
> 0
{
// TODO: improve error
return Err("Not all fields in dataset".to_string());
}
Ok((
im::HashMap::new(),
Type::Dataset(
items
.iter()
.filter(|(k, _v)| names.contains(&&*k.to_string()))
.map(|(k, v)| (k.to_string(), v.clone()))
.collect(),
),
))
}
_ => Err("Expected dataset".to_string()),
}
}
x => Err(format!("not implemented {:?}", x)),
}
}
}
impl Literal {
fn get_type(&self) -> Type {
match self {
Literal::Int64(_) => Type::Int64,
Literal::Int32(_) => Type::Int32,
Literal::Float(_) => Type::Float,
}
}
}
#[cfg(test)]
use super::parser::{expression, Span};
#[test]
fn test_type() {
assert_eq!(Literal::Float(1.0).get_type(), Type::Float);
assert_eq!(Literal::Int64(1).get_type(), Type::Int64);
}
#[test]
fn test_type_let() {
let (_, expr) = expression(Span::new("let x = 1 in x")).unwrap();
assert_eq!(expr.get_type(&im::HashMap::new()).unwrap().1, Type::Int64);
}
#[test]
fn test_type_lam() {
let (_, expr) = expression(Span::new(r"\x -> x")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
match ty {
Type::TyArr(x, y) => assert_eq!(x, y),
_ => panic!("Did not expect non-tyarr result"),
}
}
#[test]
fn test_type_lam_app() {
let (_, expr) = expression(Span::new(r"let id = \x -> x in id 1")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(ty, Type::Int64);
}
#[test]
fn test_type_sql() {
let (_, expr) = expression(Span::new("let t = {a\n1} in select a from t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new()).unwrap().1;
assert_eq!(
ty,
Type::Dataset([("a".to_string(), Type::Int64)].iter().cloned().collect())
);
}
#[test]
fn test_multiple_rows_incompatible() {
let (_, expr) = expression(Span::new("let t = {a\n1\n2\n3.0} in t")).unwrap();
let ty = expr.get_type(&im::HashMap::new());
assert!(ty.is_err());
} | let d: im::HashMap<String, Type> = items | random_line_split |
util.rs | use super::{
converter::BaseConvertInfo,
flags::RuntimeHelper,
ir::{JsExpr as Js, VNodeIR},
parser::{Directive, DirectiveArg, ElemProp, Element},
scanner::Attribute,
};
use std::{
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
marker::PhantomData,
ops::Deref,
};
#[macro_export]
macro_rules! cast {
($target: expr, $pat: path) => {{
if let $pat(a, ..) = $target {
a
} else {
panic!("mismatch variant when cast to {}", stringify!($pat));
}
}};
}
mod decode_html;
mod json;
mod named_chars;
pub mod rslint;
mod v_str;
pub use v_str::VStr;
pub fn non_whitespace(c: char) -> bool {
!c.is_ascii_whitespace()
}
pub fn get_core_component(tag: &str) -> Option<RuntimeHelper> {
use RuntimeHelper as RH;
Some(match tag {
"Teleport" | "teleport" => RH::TELEPORT,
"Suspense" | "suspense" => RH::SUSPENSE,
"KeepAlive" | "keep-alive" => RH::KEEP_ALIVE,
"BaseTransition" | "base-transition" => RH::BASE_TRANSITION,
_ => return None,
})
}
pub fn is_core_component(tag: &str) -> bool {
get_core_component(tag).is_some()
}
fn is_event_prop(prop: &str) -> bool {
let bytes = prop.as_bytes();
// equivalent to /^on[^a-z]/
bytes.len() > 2 && bytes.starts_with(b"on") && !bytes[2].is_ascii_lowercase()
}
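// Added examples: "onClick" passes (the byte after "on" is 'C', not a
// lowercase letter) while "once" fails, matching the /^on[^a-z]/ intent.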
pub fn is_mergeable_prop(prop: &str) -> bool {
prop == "class" || prop == "style" || is_event_prop(prop)
}
#[inline]
pub fn not_js_identifier(c: char) -> bool {
!c.is_alphanumeric() && c != '$' && c != '_'
}
pub fn is_simple_identifier(s: VStr) -> bool {
if VStr::has_affix(&s) {
return false;
}
let is_ident = |c| !not_js_identifier(c);
let raw = s.raw;
raw.chars().all(is_ident) && !raw.starts_with(|c: char| c.is_ascii_digit())
}
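// Added examples (hedged: VStr's constructors live elsewhere in the crate):
// a bare name like `msg` is simple; `msg.length` (contains '.') and `1abc`
// (leading digit) are not, and any VStr with affixes is rejected up front.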
macro_rules! make_list {
( $($id: ident),* ) => {
&[
$(stringify!($id)),*
]
}
}
// use simple contains for small str array
// benchmark shows linear scan takes at most 10ns
// while phf or bsearch takes 30ns
const ALLOWED_GLOBALS: &[&str] = make_list![
Infinity,
undefined,
NaN,
isFinite,
isNaN,
parseFloat,
parseInt,
decodeURI,
decodeURIComponent,
encodeURI,
encodeURIComponent,
Math,
Number,
Date,
Array,
Object,
Boolean,
String,
RegExp,
Map,
Set,
JSON,
Intl,
BigInt
];
pub fn is_global_allow_listed(s: &str) -> bool {
ALLOWED_GLOBALS.contains(&s)
}
// https://github.com/vuejs/rfcs/blob/master/active-rfcs/0008-render-function-api-change.md#special-reserved-props
const RESERVED: &[&str] = make_list![
key,
ref,
onVnodeMounted,
onVnodeUpdated,
onVnodeUnmounted,
onVnodeBeforeMount,
onVnodeBeforeUpdate,
onVnodeBeforeUnmount
];
#[inline]
pub fn is_reserved_prop(tag: &str) -> bool {
RESERVED.contains(&tag)
}
pub fn is_component_tag(tag: &str) -> bool {
tag == "component" || tag == "Component"
}
pub const fn yes(_: &str) -> bool {
true
}
pub const fn no(_: &str) -> bool {
false
}
pub fn get_vnode_call_helper(v: &VNodeIR<BaseConvertInfo>) -> RuntimeHelper {
use RuntimeHelper as RH;
if v.is_block {
return if v.is_component {
RH::CREATE_BLOCK
} else {
RH::CREATE_ELEMENT_BLOCK
};
}
if v.is_component {
RH::CREATE_VNODE
} else {
RH::CREATE_ELEMENT_VNODE
}
}
pub fn is_builtin_symbol(tag: &Js, helper: RuntimeHelper) -> bool {
if let Js::Symbol(r) = tag {
r == &helper
} else {
false
}
}
pub trait PropPattern {
fn matches(&self, name: &str) -> bool;
}
impl PropPattern for &str {
fn matches(&self, name: &str) -> bool {
name == *self
}
}
impl<F> PropPattern for F
where
F: Fn(&str) -> bool,
{
fn matches(&self, name: &str) -> bool {
self(name)
}
}
impl<const N: usize> PropPattern for [&'static str; N] {
fn matches(&self, name: &str) -> bool {
self.contains(&name)
}
}
type NameExp<'a> = Option<(&'a str, Option<VStr<'a>>)>;
pub trait PropMatcher<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a>;
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self;
fn take(prop: ElemProp<'a>) -> Self;
fn is_match<P>(p: &ElemProp<'a>, pat: &P, allow_empty: bool) -> bool
where
P: PropPattern,
{
Self::get_name_and_exp(p).map_or(false, |(name, exp)| {
pat.matches(name) && (allow_empty || !exp.map_or(true, |v| v.is_empty()))
})
}
}
pub fn is_bind_key<'a>(arg: &Option<DirectiveArg<'a>>, name: &str) -> bool {
get_bind_key(arg).map_or(false, |v| v == name)
}
fn get_bind_key<'a>(arg: &Option<DirectiveArg<'a>>) -> Option<&'a str> {
if let DirectiveArg::Static(name) = arg.as_ref()? {
Some(name)
} else {
None
}
}
impl<'a> PropMatcher<'a> for ElemProp<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
match prop {
ElemProp::Attr(Attribute { name, value, .. }) => {
let exp = value.as_ref().map(|v| v.content);
Some((name, exp))
}
ElemProp::Dir(dir @ Directive { name: "bind", .. }) => {
let name = get_bind_key(&dir.argument)?;
let exp = dir.expression.as_ref().map(|v| v.content);
Some((name, exp))
}
_ => None,
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
prop
}
fn take(prop: ElemProp<'a>) -> Self {
prop
}
}
impl<'a> PropMatcher<'a> for Directive<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
if let ElemProp::Dir(Directive {
name, expression, ..
}) = prop
{
let exp = expression.as_ref().map(|v| v.content);
Some((name, exp))
} else {
None
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
fn take(prop: ElemProp<'a>) -> Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
}
pub struct PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
elem: E,
pos: usize,
m: PhantomData<&'a M>,
}
impl<'a, E, M> PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
fn new(elem: E, pos: usize) -> Option<Self> {
Some(Self {
elem,
pos,
m: PhantomData,
})
}
pub fn get_ref(&self) -> &M {
M::get_ref(&self.elem.borrow().properties[self.pos])
}
}
// take is only available when access is mutable
impl<'a, E, M> PropFound<'a, E, M>
where
E: BorrowMut<Element<'a>>,
M: PropMatcher<'a>, | }
type DirFound<'a, E> = PropFound<'a, E, Directive<'a>>;
// sometimes mutable access to the element is not available so
// Borrow is used to refine PropFound so `take` is optional
pub fn dir_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P, Directive<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
pub fn find_dir<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn find_dir_empty<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).allow_empty().find()
}
pub struct PropFinder<'a, E, P, M = ElemProp<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
elem: E,
pat: P,
allow_empty: bool,
filter: fn(&ElemProp<'a>) -> bool,
m: PhantomData<&'a M>,
}
impl<'a, E, P, M> PropFinder<'a, E, P, M>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
fn new(elem: E, pat: P) -> Self {
Self {
elem,
pat,
allow_empty: false,
filter: |_| true,
m: PhantomData,
}
}
fn is_match(&self, p: &ElemProp<'a>) -> bool {
M::is_match(p, &self.pat, self.allow_empty)
}
pub fn dynamic_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Dir(..)),
..self
}
}
pub fn attr_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Attr(..)),
..self
}
}
pub fn find(self) -> Option<PropFound<'a, E, M>> {
let pos = self
.elem
.borrow()
.properties
.iter()
.position(|p| self.is_match(p) && (self.filter)(p))?;
PropFound::new(self.elem, pos)
}
pub fn allow_empty(self) -> Self {
Self {
allow_empty: true,
..self
}
}
}
impl<'a, P> PropFinder<'a, Element<'a>, P, ElemProp<'a>>
where
P: PropPattern + Copy,
{
pub fn find_all(self) -> impl Iterator<Item = Result<ElemProp<'a>, ElemProp<'a>>> {
let PropFinder {
elem,
pat,
allow_empty,
..
} = self;
elem.properties.into_iter().map(move |p| {
if ElemProp::is_match(&p, &pat, allow_empty) {
Ok(p)
} else {
Err(p)
}
})
}
}
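// Added note: find_all keeps every property, tagging matches as Ok and
// everything else as Err, so callers can partition without losing the
// non-matching props; see prop_find_all in the tests below.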
pub fn find_prop<'a, E, P>(elem: E, pat: P) -> Option<PropFound<'a, E, ElemProp<'a>>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn prop_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
// since std::lazy::Lazy is not stable
// it is not thread safe, not Sync.
// it is Send if F and T is Send
pub struct Lazy<T, F = fn() -> T>(UnsafeCell<Result<T, Option<F>>>)
where
F: FnOnce() -> T;
impl<T, F> Lazy<T, F>
where
F: FnOnce() -> T,
{
pub fn new(f: F) -> Self {
Self(UnsafeCell::new(Err(Some(f))))
}
}
impl<T, F> Deref for Lazy<T, F>
where
F: FnOnce() -> T,
{
type Target = T;
fn deref(&self) -> &Self::Target {
let m = unsafe { &mut *self.0.get() };
let f = match m {
Ok(t) => return t,
Err(f) => f,
};
*m = Ok(f.take().unwrap()());
match m {
Ok(t) => t,
_ => panic!("unwrap Ok"),
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::parser::test::mock_element;
#[test]
fn test_find_dir() {
let e = mock_element("<p v-if=true/>");
let found = find_dir(&e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(e.properties.len(), 1);
}
#[test]
fn test_find_dir_mut() {
let mut e = mock_element("<p v-if=true/>");
let found = find_dir(&mut e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(found.take().name, "if");
assert!(e.properties.is_empty());
}
#[test]
fn test_find_empty_dir() {
let e = mock_element("<p v-if=true v-for>");
assert!(find_dir(&e, "if").is_some());
assert!(find_dir(&e, "for").is_none());
let found = dir_finder(&e, "for").allow_empty().find();
assert!(found.is_some());
}
#[test]
fn test_find_prop() {
let mut e = mock_element("<p :name=foo name=bar/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
// prop only looks at attr and v-bind
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_prop_ignore_dynamic_bind() {
let e = mock_element("<p :[name]=foo/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_dynamic_only_prop() {
let e = mock_element("<p name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
let e = mock_element("<p v-bind:name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :[name]=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
}
#[test]
fn prop_find_all() {
let e = mock_element("<p :name=foo name=bar :[name]=baz/>");
let a: Vec<_> = prop_finder(e, "name").find_all().collect();
assert_eq!(a.len(), 3);
assert!(a[0].is_ok());
assert!(a[1].is_ok());
assert!(a[2].is_err());
}
#[test]
fn layman_lazy() {
let mut test = 0;
let l = Lazy::new(|| {
test += 1;
(0..=100).sum::<i32>()
});
assert_eq!(*l, 5050);
assert_eq!(*l, 5050);
assert_eq!(test, 1);
}
} | {
pub fn take(mut self) -> M {
// TODO: avoid O(n) behavior
M::take(self.elem.borrow_mut().properties.remove(self.pos))
} | random_line_split |
util.rs | use super::{
converter::BaseConvertInfo,
flags::RuntimeHelper,
ir::{JsExpr as Js, VNodeIR},
parser::{Directive, DirectiveArg, ElemProp, Element},
scanner::Attribute,
};
use std::{
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
marker::PhantomData,
ops::Deref,
};
#[macro_export]
macro_rules! cast {
($target: expr, $pat: path) => {{
if let $pat(a, ..) = $target {
a
} else {
panic!("mismatch variant when cast to {}", stringify!($pat));
}
}};
}
mod decode_html;
mod json;
mod named_chars;
pub mod rslint;
mod v_str;
pub use v_str::VStr;
pub fn non_whitespace(c: char) -> bool {
!c.is_ascii_whitespace()
}
pub fn get_core_component(tag: &str) -> Option<RuntimeHelper> {
use RuntimeHelper as RH;
Some(match tag {
"Teleport" | "teleport" => RH::TELEPORT,
"Suspense" | "suspense" => RH::SUSPENSE,
"KeepAlive" | "keep-alive" => RH::KEEP_ALIVE,
"BaseTransition" | "base-transition" => RH::BASE_TRANSITION,
_ => return None,
})
}
pub fn is_core_component(tag: &str) -> bool {
get_core_component(tag).is_some()
}
fn is_event_prop(prop: &str) -> bool {
let bytes = prop.as_bytes();
// equivalent to /^on[^a-z]/
bytes.len() > 2 && bytes.starts_with(b"on") && !bytes[2].is_ascii_lowercase()
}
pub fn is_mergeable_prop(prop: &str) -> bool {
prop == "class" || prop == "style" || is_event_prop(prop)
}
#[inline]
pub fn not_js_identifier(c: char) -> bool {
!c.is_alphanumeric() && c != '$' && c != '_'
}
pub fn is_simple_identifier(s: VStr) -> bool {
if VStr::has_affix(&s) {
return false;
}
let is_ident = |c| !not_js_identifier(c);
let raw = s.raw;
raw.chars().all(is_ident) && !raw.starts_with(|c: char| c.is_ascii_digit())
}
macro_rules! make_list {
( $($id: ident),* ) => {
&[
$(stringify!($id)),*
]
}
}
// use simple contains for small str array
// benchmark shows linear scan takes at most 10ns
// while phf or bsearch takes 30ns
const ALLOWED_GLOBALS: &[&str] = make_list![
Infinity,
undefined,
NaN,
isFinite,
isNaN,
parseFloat,
parseInt,
decodeURI,
decodeURIComponent,
encodeURI,
encodeURIComponent,
Math,
Number,
Date,
Array,
Object,
Boolean,
String,
RegExp,
Map,
Set,
JSON,
Intl,
BigInt
];
pub fn is_global_allow_listed(s: &str) -> bool {
ALLOWED_GLOBALS.contains(&s)
}
// https://github.com/vuejs/rfcs/blob/master/active-rfcs/0008-render-function-api-change.md#special-reserved-props
const RESERVED: &[&str] = make_list![
key,
ref,
onVnodeMounted,
onVnodeUpdated,
onVnodeUnmounted,
onVnodeBeforeMount,
onVnodeBeforeUpdate,
onVnodeBeforeUnmount
];
#[inline]
pub fn is_reserved_prop(tag: &str) -> bool {
RESERVED.contains(&tag)
}
pub fn is_component_tag(tag: &str) -> bool {
tag == "component" || tag == "Component"
}
pub const fn yes(_: &str) -> bool {
true
}
pub const fn no(_: &str) -> bool {
false
}
pub fn get_vnode_call_helper(v: &VNodeIR<BaseConvertInfo>) -> RuntimeHelper {
use RuntimeHelper as RH;
if v.is_block {
return if v.is_component {
RH::CREATE_BLOCK
} else {
RH::CREATE_ELEMENT_BLOCK
};
}
if v.is_component {
RH::CREATE_VNODE
} else {
RH::CREATE_ELEMENT_VNODE
}
}
pub fn is_builtin_symbol(tag: &Js, helper: RuntimeHelper) -> bool {
if let Js::Symbol(r) = tag {
r == &helper
} else {
false
}
}
pub trait PropPattern {
fn matches(&self, name: &str) -> bool;
}
impl PropPattern for &str {
fn matches(&self, name: &str) -> bool {
name == *self
}
}
impl<F> PropPattern for F
where
F: Fn(&str) -> bool,
{
fn matches(&self, name: &str) -> bool {
self(name)
}
}
impl<const N: usize> PropPattern for [&'static str; N] {
fn matches(&self, name: &str) -> bool {
self.contains(&name)
}
}
type NameExp<'a> = Option<(&'a str, Option<VStr<'a>>)>;
pub trait PropMatcher<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a>;
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self;
fn take(prop: ElemProp<'a>) -> Self;
fn is_match<P>(p: &ElemProp<'a>, pat: &P, allow_empty: bool) -> bool
where
P: PropPattern,
{
Self::get_name_and_exp(p).map_or(false, |(name, exp)| {
pat.matches(name) && (allow_empty || !exp.map_or(true, |v| v.is_empty()))
})
}
}
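// Added summary: with ElemProp as the matcher, both plain attributes and
// static v-bind arguments are seen under their bound name, while Directive
// as the matcher keys on the directive's own name (e.g. "if" for v-if).
// is_match also rejects empty-valued props unless allow_empty is requested.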
pub fn is_bind_key<'a>(arg: &Option<DirectiveArg<'a>>, name: &str) -> bool {
get_bind_key(arg).map_or(false, |v| v == name)
}
fn get_bind_key<'a>(arg: &Option<DirectiveArg<'a>>) -> Option<&'a str> {
if let DirectiveArg::Static(name) = arg.as_ref()? {
Some(name)
} else {
None
}
}
impl<'a> PropMatcher<'a> for ElemProp<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
match prop {
ElemProp::Attr(Attribute { name, value, .. }) => {
let exp = value.as_ref().map(|v| v.content);
Some((name, exp))
}
ElemProp::Dir(dir @ Directive { name: "bind", .. }) => {
let name = get_bind_key(&dir.argument)?;
let exp = dir.expression.as_ref().map(|v| v.content);
Some((name, exp))
}
_ => None,
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
prop
}
fn take(prop: ElemProp<'a>) -> Self {
prop
}
}
impl<'a> PropMatcher<'a> for Directive<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
if let ElemProp::Dir(Directive {
name, expression, ..
}) = prop
{
let exp = expression.as_ref().map(|v| v.content);
Some((name, exp))
} else {
None
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
fn take(prop: ElemProp<'a>) -> Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
}
pub struct PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
elem: E,
pos: usize,
m: PhantomData<&'a M>,
}
impl<'a, E, M> PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
fn new(elem: E, pos: usize) -> Option<Self> {
Some(Self {
elem,
pos,
m: PhantomData,
})
}
pub fn get_ref(&self) -> &M {
M::get_ref(&self.elem.borrow().properties[self.pos])
}
}
// take is only available when access is mutable
impl<'a, E, M> PropFound<'a, E, M>
where
E: BorrowMut<Element<'a>>,
M: PropMatcher<'a>,
{
pub fn take(mut self) -> M {
// TODO: avoid O(n) behavior
M::take(self.elem.borrow_mut().properties.remove(self.pos))
}
}
type DirFound<'a, E> = PropFound<'a, E, Directive<'a>>;
// sometimes mutable access to the element is not available so
// Borrow is used to refine PropFound so `take` is optional
pub fn dir_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P, Directive<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
pub fn find_dir<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn find_dir_empty<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).allow_empty().find()
}
pub struct PropFinder<'a, E, P, M = ElemProp<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
elem: E,
pat: P,
allow_empty: bool,
filter: fn(&ElemProp<'a>) -> bool,
m: PhantomData<&'a M>,
}
impl<'a, E, P, M> PropFinder<'a, E, P, M>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
fn new(elem: E, pat: P) -> Self {
Self {
elem,
pat,
allow_empty: false,
filter: |_| true,
m: PhantomData,
}
}
fn is_match(&self, p: &ElemProp<'a>) -> bool {
M::is_match(p, &self.pat, self.allow_empty)
}
pub fn dynamic_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Dir(..)),
..self
}
}
pub fn attr_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Attr(..)),
..self
}
}
pub fn find(self) -> Option<PropFound<'a, E, M>> {
let pos = self
.elem
.borrow()
.properties
.iter()
.position(|p| self.is_match(p) && (self.filter)(p))?;
PropFound::new(self.elem, pos)
}
pub fn allow_empty(self) -> Self {
Self {
allow_empty: true,
..self
}
}
}
impl<'a, P> PropFinder<'a, Element<'a>, P, ElemProp<'a>>
where
P: PropPattern + Copy,
{
pub fn find_all(self) -> impl Iterator<Item = Result<ElemProp<'a>, ElemProp<'a>>> {
let PropFinder {
elem,
pat,
allow_empty,
..
} = self;
elem.properties.into_iter().map(move |p| {
if ElemProp::is_match(&p, &pat, allow_empty) {
Ok(p)
} else {
Err(p)
}
})
}
}
pub fn find_prop<'a, E, P>(elem: E, pat: P) -> Option<PropFound<'a, E, ElemProp<'a>>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn prop_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P>
where
E: Borrow<Element<'a>>,
P: PropPattern,
|
// since std::lazy::Lazy is not stable
// it is not thread safe, not Sync.
// it is Send if F and T is Send
pub struct Lazy<T, F = fn() -> T>(UnsafeCell<Result<T, Option<F>>>)
where
F: FnOnce() -> T;
impl<T, F> Lazy<T, F>
where
F: FnOnce() -> T,
{
pub fn new(f: F) -> Self {
Self(UnsafeCell::new(Err(Some(f))))
}
}
impl<T, F> Deref for Lazy<T, F>
where
F: FnOnce() -> T,
{
type Target = T;
fn deref(&self) -> &Self::Target {
let m = unsafe { &mut *self.0.get() };
let f = match m {
Ok(t) => return t,
Err(f) => f,
};
*m = Ok(f.take().unwrap()());
match m {
Ok(t) => t,
_ => panic!("unwrap Ok"),
}
}
}
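// Added illustrative test: the init closure runs exactly once, on first
// deref, and later derefs hit the cached value; the UnsafeCell design is
// deliberately not Sync, so a Lazy must stay on a single thread.
#[cfg(test)]
#[test]
fn lazy_single_init_example() {
    let l = Lazy::new(|| "hello".to_string());
    assert_eq!(&*l, "hello");
    assert_eq!(l.len(), 5); // Deref lets String methods apply directly
}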
#[cfg(test)]
mod test {
use super::*;
use crate::parser::test::mock_element;
#[test]
fn test_find_dir() {
let e = mock_element("<p v-if=true/>");
let found = find_dir(&e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(e.properties.len(), 1);
}
#[test]
fn test_find_dir_mut() {
let mut e = mock_element("<p v-if=true/>");
let found = find_dir(&mut e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(found.take().name, "if");
assert!(e.properties.is_empty());
}
#[test]
fn test_find_empty_dir() {
let e = mock_element("<p v-if=true v-for>");
assert!(find_dir(&e, "if").is_some());
assert!(find_dir(&e, "for").is_none());
let found = dir_finder(&e, "for").allow_empty().find();
assert!(found.is_some());
}
#[test]
fn test_find_prop() {
let mut e = mock_element("<p :name=foo name=bar/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
// prop only looks at attr and v-bind
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_prop_ignore_dynamic_bind() {
let e = mock_element("<p :[name]=foo/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_dynamic_only_prop() {
let e = mock_element("<p name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
let e = mock_element("<p v-bind:name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :[name]=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
}
#[test]
fn prop_find_all() {
let e = mock_element("<p :name=foo name=bar :[name]=baz/>");
let a: Vec<_> = prop_finder(e, "name").find_all().collect();
assert_eq!(a.len(), 3);
assert!(a[0].is_ok());
assert!(a[1].is_ok());
assert!(a[2].is_err());
}
#[test]
fn layman_lazy() {
let mut test = 0;
let l = Lazy::new(|| {
test += 1;
(0..=100).sum::<i32>()
});
assert_eq!(*l, 5050);
assert_eq!(*l, 5050);
assert_eq!(test, 1);
}
}
| {
PropFinder::new(elem, pat)
} | identifier_body |
util.rs | use super::{
converter::BaseConvertInfo,
flags::RuntimeHelper,
ir::{JsExpr as Js, VNodeIR},
parser::{Directive, DirectiveArg, ElemProp, Element},
scanner::Attribute,
};
use std::{
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
marker::PhantomData,
ops::Deref,
};
#[macro_export]
macro_rules! cast {
($target: expr, $pat: path) => {{
if let $pat(a, ..) = $target {
a
} else {
panic!("mismatch variant when cast to {}", stringify!($pat));
}
}};
}
mod decode_html;
mod json;
mod named_chars;
pub mod rslint;
mod v_str;
pub use v_str::VStr;
pub fn non_whitespace(c: char) -> bool {
!c.is_ascii_whitespace()
}
pub fn get_core_component(tag: &str) -> Option<RuntimeHelper> {
use RuntimeHelper as RH;
Some(match tag {
"Teleport" | "teleport" => RH::TELEPORT,
"Suspense" | "suspense" => RH::SUSPENSE,
"KeepAlive" | "keep-alive" => RH::KEEP_ALIVE,
"BaseTransition" | "base-transition" => RH::BASE_TRANSITION,
_ => return None,
})
}
pub fn is_core_component(tag: &str) -> bool {
get_core_component(tag).is_some()
}
fn is_event_prop(prop: &str) -> bool {
let bytes = prop.as_bytes();
// equivalent to /^on[^a-z]/
bytes.len() > 2 && bytes.starts_with(b"on") && !bytes[2].is_ascii_lowercase()
}
pub fn is_mergeable_prop(prop: &str) -> bool {
prop == "class" || prop == "style" || is_event_prop(prop)
}
#[inline]
pub fn not_js_identifier(c: char) -> bool {
!c.is_alphanumeric() && c != '$' && c != '_'
}
pub fn is_simple_identifier(s: VStr) -> bool {
if VStr::has_affix(&s) {
return false;
}
let is_ident = |c| !not_js_identifier(c);
let raw = s.raw;
raw.chars().all(is_ident) && !raw.starts_with(|c: char| c.is_ascii_digit())
}
macro_rules! make_list {
( $($id: ident),* ) => {
&[
$(stringify!($id)),*
]
}
}
// use simple contains for small str array
// benchmark shows linear scan takes at most 10ns
// while phf or bsearch takes 30ns
const ALLOWED_GLOBALS: &[&str] = make_list![
Infinity,
undefined,
NaN,
isFinite,
isNaN,
parseFloat,
parseInt,
decodeURI,
decodeURIComponent,
encodeURI,
encodeURIComponent,
Math,
Number,
Date,
Array,
Object,
Boolean,
String,
RegExp,
Map,
Set,
JSON,
Intl,
BigInt
];
pub fn is_global_allow_listed(s: &str) -> bool {
ALLOWED_GLOBALS.contains(&s)
}
// https://github.com/vuejs/rfcs/blob/master/active-rfcs/0008-render-function-api-change.md#special-reserved-props
const RESERVED: &[&str] = make_list![
key,
ref,
onVnodeMounted,
onVnodeUpdated,
onVnodeUnmounted,
onVnodeBeforeMount,
onVnodeBeforeUpdate,
onVnodeBeforeUnmount
];
#[inline]
pub fn is_reserved_prop(tag: &str) -> bool {
RESERVED.contains(&tag)
}
pub fn is_component_tag(tag: &str) -> bool {
tag == "component" || tag == "Component"
}
pub const fn yes(_: &str) -> bool {
true
}
pub const fn no(_: &str) -> bool {
false
}
pub fn get_vnode_call_helper(v: &VNodeIR<BaseConvertInfo>) -> RuntimeHelper {
use RuntimeHelper as RH;
if v.is_block {
return if v.is_component {
RH::CREATE_BLOCK
} else {
RH::CREATE_ELEMENT_BLOCK
};
}
if v.is_component {
RH::CREATE_VNODE
} else {
RH::CREATE_ELEMENT_VNODE
}
}
pub fn is_builtin_symbol(tag: &Js, helper: RuntimeHelper) -> bool {
if let Js::Symbol(r) = tag {
r == &helper
} else {
false
}
}
pub trait PropPattern {
fn matches(&self, name: &str) -> bool;
}
impl PropPattern for &str {
fn matches(&self, name: &str) -> bool {
name == *self
}
}
impl<F> PropPattern for F
where
F: Fn(&str) -> bool,
{
fn matches(&self, name: &str) -> bool {
self(name)
}
}
impl<const N: usize> PropPattern for [&'static str; N] {
fn matches(&self, name: &str) -> bool {
self.contains(&name)
}
}
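// Added usage notes: the three impls above let call sites pass a single
// name ("if"), a fixed set (["if", "else-if", "else"]), or a predicate
// (|n: &str| n.starts_with("on")) wherever a PropPattern is expected.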
type NameExp<'a> = Option<(&'a str, Option<VStr<'a>>)>;
pub trait PropMatcher<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a>;
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self;
fn take(prop: ElemProp<'a>) -> Self;
fn is_match<P>(p: &ElemProp<'a>, pat: &P, allow_empty: bool) -> bool
where
P: PropPattern,
{
Self::get_name_and_exp(p).map_or(false, |(name, exp)| {
pat.matches(name) && (allow_empty || !exp.map_or(true, |v| v.is_empty()))
})
}
}
pub fn is_bind_key<'a>(arg: &Option<DirectiveArg<'a>>, name: &str) -> bool {
get_bind_key(arg).map_or(false, |v| v == name)
}
fn get_bind_key<'a>(arg: &Option<DirectiveArg<'a>>) -> Option<&'a str> {
if let DirectiveArg::Static(name) = arg.as_ref()? {
Some(name)
} else |
}
impl<'a> PropMatcher<'a> for ElemProp<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
match prop {
ElemProp::Attr(Attribute { name, value, .. }) => {
let exp = value.as_ref().map(|v| v.content);
Some((name, exp))
}
ElemProp::Dir(dir @ Directive { name: "bind", .. }) => {
let name = get_bind_key(&dir.argument)?;
let exp = dir.expression.as_ref().map(|v| v.content);
Some((name, exp))
}
_ => None,
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
prop
}
fn take(prop: ElemProp<'a>) -> Self {
prop
}
}
impl<'a> PropMatcher<'a> for Directive<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
if let ElemProp::Dir(Directive {
name, expression, ..
}) = prop
{
let exp = expression.as_ref().map(|v| v.content);
Some((name, exp))
} else {
None
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
fn take(prop: ElemProp<'a>) -> Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
}
pub struct PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
elem: E,
pos: usize,
m: PhantomData<&'a M>,
}
impl<'a, E, M> PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
fn new(elem: E, pos: usize) -> Option<Self> {
Some(Self {
elem,
pos,
m: PhantomData,
})
}
pub fn get_ref(&self) -> &M {
M::get_ref(&self.elem.borrow().properties[self.pos])
}
}
// take is only available when access is mutable
impl<'a, E, M> PropFound<'a, E, M>
where
E: BorrowMut<Element<'a>>,
M: PropMatcher<'a>,
{
pub fn take(mut self) -> M {
// TODO: avoid O(n) behavior
M::take(self.elem.borrow_mut().properties.remove(self.pos))
}
}
type DirFound<'a, E> = PropFound<'a, E, Directive<'a>>;
// sometimes mutable access to the element is not available so
// Borrow is used to refine PropFound so `take` is optional
pub fn dir_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P, Directive<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
pub fn find_dir<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn find_dir_empty<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).allow_empty().find()
}
pub struct PropFinder<'a, E, P, M = ElemProp<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
elem: E,
pat: P,
allow_empty: bool,
filter: fn(&ElemProp<'a>) -> bool,
m: PhantomData<&'a M>,
}
impl<'a, E, P, M> PropFinder<'a, E, P, M>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
fn new(elem: E, pat: P) -> Self {
Self {
elem,
pat,
allow_empty: false,
filter: |_| true,
m: PhantomData,
}
}
fn is_match(&self, p: &ElemProp<'a>) -> bool {
M::is_match(p, &self.pat, self.allow_empty)
}
pub fn dynamic_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Dir(..)),
..self
}
}
pub fn attr_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Attr(..)),
..self
}
}
pub fn find(self) -> Option<PropFound<'a, E, M>> {
let pos = self
.elem
.borrow()
.properties
.iter()
.position(|p| self.is_match(p) && (self.filter)(p))?;
PropFound::new(self.elem, pos)
}
pub fn allow_empty(self) -> Self {
Self {
allow_empty: true,
..self
}
}
}
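// Added usage sketch: filters chain before the scan, e.g.
// prop_finder(&elem, "class").attr_only().allow_empty().find()
// yields the first static class attribute even when its value is empty.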
impl<'a, P> PropFinder<'a, Element<'a>, P, ElemProp<'a>>
where
P: PropPattern + Copy,
{
pub fn find_all(self) -> impl Iterator<Item = Result<ElemProp<'a>, ElemProp<'a>>> {
let PropFinder {
elem,
pat,
allow_empty,
..
} = self;
elem.properties.into_iter().map(move |p| {
if ElemProp::is_match(&p, &pat, allow_empty) {
Ok(p)
} else {
Err(p)
}
})
}
}
pub fn find_prop<'a, E, P>(elem: E, pat: P) -> Option<PropFound<'a, E, ElemProp<'a>>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn prop_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
// since std::lazy::Lazy is not stable
// it is not thread safe, not Sync.
// it is Send if F and T is Send
pub struct Lazy<T, F = fn() -> T>(UnsafeCell<Result<T, Option<F>>>)
where
F: FnOnce() -> T;
impl<T, F> Lazy<T, F>
where
F: FnOnce() -> T,
{
pub fn new(f: F) -> Self {
Self(UnsafeCell::new(Err(Some(f))))
}
}
impl<T, F> Deref for Lazy<T, F>
where
F: FnOnce() -> T,
{
type Target = T;
fn deref(&self) -> &Self::Target {
let m = unsafe { &mut *self.0.get() };
let f = match m {
Ok(t) => return t,
Err(f) => f,
};
*m = Ok(f.take().unwrap()());
match m {
Ok(t) => t,
_ => panic!("unwrap Ok"),
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::parser::test::mock_element;
#[test]
fn test_find_dir() {
let e = mock_element("<p v-if=true/>");
let found = find_dir(&e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(e.properties.len(), 1);
}
#[test]
fn test_find_dir_mut() {
let mut e = mock_element("<p v-if=true/>");
let found = find_dir(&mut e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(found.take().name, "if");
assert!(e.properties.is_empty());
}
#[test]
fn test_find_empty_dir() {
let e = mock_element("<p v-if=true v-for>");
assert!(find_dir(&e, "if").is_some());
assert!(find_dir(&e, "for").is_none());
let found = dir_finder(&e, "for").allow_empty().find();
assert!(found.is_some());
}
#[test]
fn test_find_prop() {
let mut e = mock_element("<p :name=foo name=bar/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
// prop only looks at attr and v-bind
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_prop_ignore_dynamic_bind() {
let e = mock_element("<p :[name]=foo/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_dynamic_only_prop() {
let e = mock_element("<p name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
let e = mock_element("<p v-bind:name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :[name]=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
}
#[test]
fn prop_find_all() {
let e = mock_element("<p :name=foo name=bar :[name]=baz/>");
let a: Vec<_> = prop_finder(e, "name").find_all().collect();
assert_eq!(a.len(), 3);
assert!(a[0].is_ok());
assert!(a[1].is_ok());
assert!(a[2].is_err());
}
#[test]
fn layman_lazy() {
let mut test = 0;
let l = Lazy::new(|| {
test += 1;
(0..=100).sum::<i32>()
});
assert_eq!(*l, 5050);
assert_eq!(*l, 5050);
assert_eq!(test, 1);
}
}
| {
None
} | conditional_block |
util.rs | use super::{
converter::BaseConvertInfo,
flags::RuntimeHelper,
ir::{JsExpr as Js, VNodeIR},
parser::{Directive, DirectiveArg, ElemProp, Element},
scanner::Attribute,
};
use std::{
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
marker::PhantomData,
ops::Deref,
};
#[macro_export]
macro_rules! cast {
($target: expr, $pat: path) => {{
if let $pat(a, ..) = $target {
a
} else {
panic!("mismatch variant when cast to {}", stringify!($pat));
}
}};
}
mod decode_html;
mod json;
mod named_chars;
pub mod rslint;
mod v_str;
pub use v_str::VStr;
pub fn non_whitespace(c: char) -> bool {
!c.is_ascii_whitespace()
}
pub fn get_core_component(tag: &str) -> Option<RuntimeHelper> {
use RuntimeHelper as RH;
Some(match tag {
"Teleport" | "teleport" => RH::TELEPORT,
"Suspense" | "suspense" => RH::SUSPENSE,
"KeepAlive" | "keep-alive" => RH::KEEP_ALIVE,
"BaseTransition" | "base-transition" => RH::BASE_TRANSITION,
_ => return None,
})
}
pub fn is_core_component(tag: &str) -> bool {
get_core_component(tag).is_some()
}
fn is_event_prop(prop: &str) -> bool {
let bytes = prop.as_bytes();
// equivalent to /^on[^a-z]/
bytes.len() > 2 && bytes.starts_with(b"on") && !bytes[2].is_ascii_lowercase()
}
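// e.g. (illustrative note, not in the original source): is_event_prop("onClick")
// is true, while is_event_prop("once") is false since the byte right after "on"
// is a lowercase 'c'.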
pub fn is_mergeable_prop(prop: &str) -> bool {
prop == "class" || prop == "style" || is_event_prop(prop)
}
#[inline]
pub fn not_js_identifier(c: char) -> bool {
!c.is_alphanumeric() && c != '$' && c != '_'
}
pub fn is_simple_identifier(s: VStr) -> bool {
if VStr::has_affix(&s) {
return false;
}
let is_ident = |c| !not_js_identifier(c);
let raw = s.raw;
raw.chars().all(is_ident) && !raw.starts_with(|c: char| c.is_ascii_digit())
}
macro_rules! make_list {
( $($id: ident),* ) => {
&[
$(stringify!($id)),*
]
}
}
// use a simple `contains` for this small str array:
// a benchmark shows a linear scan takes at most 10ns,
// while phf or binary search takes ~30ns
const ALLOWED_GLOBALS: &[&str] = make_list![
Infinity,
undefined,
NaN,
isFinite,
isNaN,
parseFloat,
parseInt,
decodeURI,
decodeURIComponent,
encodeURI,
encodeURIComponent,
Math,
Number,
Date,
Array,
Object,
Boolean,
String,
RegExp,
Map,
Set,
JSON,
Intl,
BigInt
];
pub fn is_global_allow_listed(s: &str) -> bool {
ALLOWED_GLOBALS.contains(&s)
}
// https://github.com/vuejs/rfcs/blob/master/active-rfcs/0008-render-function-api-change.md#special-reserved-props
const RESERVED: &[&str] = make_list![
key,
ref,
onVnodeMounted,
onVnodeUpdated,
onVnodeUnmounted,
onVnodeBeforeMount,
onVnodeBeforeUpdate,
onVnodeBeforeUnmount
];
#[inline]
pub fn is_reserved_prop(tag: &str) -> bool {
RESERVED.contains(&tag)
}
pub fn is_component_tag(tag: &str) -> bool {
tag == "component" || tag == "Component"
}
pub const fn yes(_: &str) -> bool {
true
}
pub const fn no(_: &str) -> bool {
false
}
pub fn get_vnode_call_helper(v: &VNodeIR<BaseConvertInfo>) -> RuntimeHelper {
use RuntimeHelper as RH;
if v.is_block {
return if v.is_component {
RH::CREATE_BLOCK
} else {
RH::CREATE_ELEMENT_BLOCK
};
}
if v.is_component {
RH::CREATE_VNODE
} else {
RH::CREATE_ELEMENT_VNODE
}
}
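// summary of the mapping above (our note): block + component => CREATE_BLOCK,
// block + element => CREATE_ELEMENT_BLOCK, vnode + component => CREATE_VNODE,
// and vnode + element => CREATE_ELEMENT_VNODE.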
pub fn is_builtin_symbol(tag: &Js, helper: RuntimeHelper) -> bool {
if let Js::Symbol(r) = tag {
r == &helper
} else {
false
}
}
pub trait PropPattern {
fn matches(&self, name: &str) -> bool;
}
impl PropPattern for &str {
fn matches(&self, name: &str) -> bool {
name == *self
}
}
impl<F> PropPattern for F
where
F: Fn(&str) -> bool,
{
fn matches(&self, name: &str) -> bool {
self(name)
}
}
impl<const N: usize> PropPattern for [&'static str; N] {
fn matches(&self, name: &str) -> bool {
self.contains(&name)
}
}
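// PropPattern is thus matched by a literal name, a predicate, or a name list, e.g.
// (illustrative calls, not from the original source): find_dir(&e, "if"),
// find_dir(&e, |n: &str| n.starts_with("on")), or find_dir(&e, ["if", "else-if"]).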
type NameExp<'a> = Option<(&'a str, Option<VStr<'a>>)>;
pub trait PropMatcher<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a>;
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self;
fn take(prop: ElemProp<'a>) -> Self;
fn is_match<P>(p: &ElemProp<'a>, pat: &P, allow_empty: bool) -> bool
where
P: PropPattern,
{
Self::get_name_and_exp(p).map_or(false, |(name, exp)| {
pat.matches(name) && (allow_empty || !exp.map_or(true, |v| v.is_empty()))
})
}
}
pub fn is_bind_key<'a>(arg: &Option<DirectiveArg<'a>>, name: &str) -> bool {
get_bind_key(arg).map_or(false, |v| v == name)
}
fn get_bind_key<'a>(arg: &Option<DirectiveArg<'a>>) -> Option<&'a str> {
if let DirectiveArg::Static(name) = arg.as_ref()? {
Some(name)
} else {
None
}
}
impl<'a> PropMatcher<'a> for ElemProp<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
match prop {
ElemProp::Attr(Attribute { name, value, .. }) => {
let exp = value.as_ref().map(|v| v.content);
Some((name, exp))
}
ElemProp::Dir(dir @ Directive { name: "bind", .. }) => {
let name = get_bind_key(&dir.argument)?;
let exp = dir.expression.as_ref().map(|v| v.content);
Some((name, exp))
}
_ => None,
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
prop
}
fn take(prop: ElemProp<'a>) -> Self {
prop
}
}
impl<'a> PropMatcher<'a> for Directive<'a> {
fn get_name_and_exp(prop: &ElemProp<'a>) -> NameExp<'a> {
if let ElemProp::Dir(Directive {
name, expression, ..
}) = prop
{
let exp = expression.as_ref().map(|v| v.content);
Some((name, exp))
} else {
None
}
}
fn get_ref<'b>(prop: &'b ElemProp<'a>) -> &'b Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
fn take(prop: ElemProp<'a>) -> Self {
if let ElemProp::Dir(dir) = prop {
return dir;
}
unreachable!("invalid call")
}
}
pub struct | <'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
elem: E,
pos: usize,
m: PhantomData<&'a M>,
}
impl<'a, E, M> PropFound<'a, E, M>
where
E: Borrow<Element<'a>>,
M: PropMatcher<'a>,
{
fn new(elem: E, pos: usize) -> Option<Self> {
Some(Self {
elem,
pos,
m: PhantomData,
})
}
pub fn get_ref(&self) -> &M {
M::get_ref(&self.elem.borrow().properties[self.pos])
}
}
// take is only available when access is mutable
impl<'a, E, M> PropFound<'a, E, M>
where
E: BorrowMut<Element<'a>>,
M: PropMatcher<'a>,
{
pub fn take(mut self) -> M {
// TODO: avoid O(n) behavior
M::take(self.elem.borrow_mut().properties.remove(self.pos))
}
}
type DirFound<'a, E> = PropFound<'a, E, Directive<'a>>;
// sometimes mutable access to the element is not available, so
// PropFound is bounded by Borrow and `take` is only offered via BorrowMut
pub fn dir_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P, Directive<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
pub fn find_dir<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn find_dir_empty<'a, E, P>(elem: E, pat: P) -> Option<DirFound<'a, E>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).allow_empty().find()
}
pub struct PropFinder<'a, E, P, M = ElemProp<'a>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
elem: E,
pat: P,
allow_empty: bool,
filter: fn(&ElemProp<'a>) -> bool,
m: PhantomData<&'a M>,
}
impl<'a, E, P, M> PropFinder<'a, E, P, M>
where
E: Borrow<Element<'a>>,
P: PropPattern,
M: PropMatcher<'a>,
{
fn new(elem: E, pat: P) -> Self {
Self {
elem,
pat,
allow_empty: false,
filter: |_| true,
m: PhantomData,
}
}
fn is_match(&self, p: &ElemProp<'a>) -> bool {
M::is_match(p, &self.pat, self.allow_empty)
}
pub fn dynamic_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Dir(..)),
..self
}
}
pub fn attr_only(self) -> Self {
Self {
filter: |p| matches!(p, ElemProp::Attr(..)),
..self
}
}
pub fn find(self) -> Option<PropFound<'a, E, M>> {
let pos = self
.elem
.borrow()
.properties
.iter()
.position(|p| self.is_match(p) && (self.filter)(p))?;
PropFound::new(self.elem, pos)
}
pub fn allow_empty(self) -> Self {
Self {
allow_empty: true,
..self
}
}
}
impl<'a, P> PropFinder<'a, Element<'a>, P, ElemProp<'a>>
where
P: PropPattern + Copy,
{
pub fn find_all(self) -> impl Iterator<Item = Result<ElemProp<'a>, ElemProp<'a>>> {
let PropFinder {
elem,
pat,
allow_empty,
..
} = self;
elem.properties.into_iter().map(move |p| {
if ElemProp::is_match(&p, &pat, allow_empty) {
Ok(p)
} else {
Err(p)
}
})
}
}
pub fn find_prop<'a, E, P>(elem: E, pat: P) -> Option<PropFound<'a, E, ElemProp<'a>>>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat).find()
}
pub fn prop_finder<'a, E, P>(elem: E, pat: P) -> PropFinder<'a, E, P>
where
E: Borrow<Element<'a>>,
P: PropPattern,
{
PropFinder::new(elem, pat)
}
// a layman's Lazy, since std::lazy::Lazy is not stable.
// It is not thread safe (not Sync),
// but it is Send if both F and T are Send.
pub struct Lazy<T, F = fn() -> T>(UnsafeCell<Result<T, Option<F>>>)
where
F: FnOnce() -> T;
impl<T, F> Lazy<T, F>
where
F: FnOnce() -> T,
{
pub fn new(f: F) -> Self {
Self(UnsafeCell::new(Err(Some(f))))
}
}
impl<T, F> Deref for Lazy<T, F>
where
F: FnOnce() -> T,
{
type Target = T;
fn deref(&self) -> &Self::Target {
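// soundness sketch (our note, not in the original): the UnsafeCell makes Lazy
// !Sync, so a given instance is only ever touched from one thread, and the
// &mut produced below is unique for the duration of this call.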
let m = unsafe { &mut *self.0.get() };
let f = match m {
Ok(t) => return t,
Err(f) => f,
};
*m = Ok(f.take().unwrap()());
match m {
Ok(t) => t,
_ => panic!("unwrap Ok"),
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::parser::test::mock_element;
#[test]
fn test_find_dir() {
let e = mock_element("<p v-if=true/>");
let found = find_dir(&e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(e.properties.len(), 1);
}
#[test]
fn test_find_dir_mut() {
let mut e = mock_element("<p v-if=true/>");
let found = find_dir(&mut e, "if");
let found = found.expect("should find directive");
assert_eq!(found.get_ref().name, "if");
assert_eq!(found.take().name, "if");
assert!(e.properties.is_empty());
}
#[test]
fn test_find_empty_dir() {
let e = mock_element("<p v-if=true v-for>");
assert!(find_dir(&e, "if").is_some());
assert!(find_dir(&e, "for").is_none());
let found = dir_finder(&e, "for").allow_empty().find();
assert!(found.is_some());
}
#[test]
fn test_find_prop() {
let mut e = mock_element("<p :name=foo name=bar/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
// prop only looks at attr and v-bind
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "bind").is_none());
find_prop(&mut e, "name").unwrap().take();
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_prop_ignore_dynamic_bind() {
let e = mock_element("<p :[name]=foo/>");
assert!(find_dir(&e, "name").is_none());
assert!(find_dir(&e, "bind").is_some());
assert!(find_prop(&e, "name").is_none());
}
#[test]
fn find_dynamic_only_prop() {
let e = mock_element("<p name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
let e = mock_element("<p v-bind:name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :name=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_some());
let e = mock_element("<p :[name]=foo/>");
assert!(prop_finder(&e, "name").dynamic_only().find().is_none());
}
#[test]
fn prop_find_all() {
let e = mock_element("<p :name=foo name=bar :[name]=baz/>");
let a: Vec<_> = prop_finder(e, "name").find_all().collect();
assert_eq!(a.len(), 3);
assert!(a[0].is_ok());
assert!(a[1].is_ok());
assert!(a[2].is_err());
}
#[test]
fn layman_lazy() {
let mut test = 0;
let l = Lazy::new(|| {
test += 1;
(0..=100).sum::<i32>()
});
assert_eq!(*l, 5050);
assert_eq!(*l, 5050);
assert_eq!(test, 1);
}
}
| PropFound | identifier_name |
lib.rs | // Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
#![cfg_attr(doc_cfg, feature(doc_cfg))]
pub use anyhow::Result;
use cargo_toml::{Dependency, Manifest};
use heck::AsShoutySnakeCase;
use tauri_utils::{
config::Config,
resources::{external_binaries, resource_relpath, ResourcePaths},
};
use std::path::{Path, PathBuf};
#[cfg(feature = "codegen")]
mod codegen;
mod static_vcruntime;
#[cfg(feature = "codegen")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "codegen")))]
pub use codegen::context::CodegenContext;
fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
let from = from.as_ref();
let to = to.as_ref();
if !from.exists() {
return Err(anyhow::anyhow!("{:?} does not exist", from));
}
if !from.is_file() {
return Err(anyhow::anyhow!("{:?} is not a file", from));
}
let dest_dir = to.parent().expect("No data in parent");
std::fs::create_dir_all(dest_dir)?;
std::fs::copy(from, to)?;
Ok(())
}
fn copy_binaries(
binaries: ResourcePaths,
target_triple: &str,
path: &Path,
package_name: Option<&String>,
) -> Result<()> {
for src in binaries {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let file_name = src
.file_name()
.expect("failed to extract external binary filename")
.to_string_lossy()
.replace(&format!("-{target_triple}"), "");
if package_name.map_or(false, |n| n == &file_name) {
return Err(anyhow::anyhow!(
"Cannot define a sidecar with the same name as the Cargo package name `{}`. Please change the sidecar name in the filesystem and the Tauri configuration.",
file_name
));
}
let dest = path.join(file_name);
if dest.exists() {
std::fs::remove_file(&dest).unwrap();
}
copy_file(&src, &dest)?;
}
Ok(())
}
/// Copies resources to a path.
fn copy_resources(resources: ResourcePaths<'_>, path: &Path) -> Result<()> {
for src in resources {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let dest = path.join(resource_relpath(&src));
copy_file(&src, dest)?;
}
Ok(())
}
// checks if the given Cargo feature is enabled.
fn has_feature(feature: &str) -> bool {
// when a feature is enabled, Cargo sets the `CARGO_FEATURE_<name>` env var to 1
// https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
std::env::var(format!("CARGO_FEATURE_{}", AsShoutySnakeCase(feature)))
.map(|x| x == "1")
.unwrap_or(false)
}
// creates a cfg alias if `has_feature` is true.
// `alias` must be a snake case string.
fn cfg_alias(alias: &str, has_feature: bool) {
if has_feature {
println!("cargo:rustc-cfg={alias}");
}
}
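// e.g. (illustrative): cfg_alias("dev", true) prints `cargo:rustc-cfg=dev`,
// letting crate code gate items behind #[cfg(dev)].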
/// Attributes used on Windows.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct WindowsAttributes {
window_icon_path: Option<PathBuf>,
/// The path to the sdk location.
///
/// For the GNU toolkit this has to be the path where MinGW put windres.exe and ar.exe.
/// This could be something like: "C:\Program Files\mingw-w64\x86_64-5.3.0-win32-seh-rt_v4-rev0\mingw64\bin"
///
/// For MSVC the Windows SDK has to be installed. It comes with the resource compiler rc.exe.
/// This should be set to the root directory of the Windows SDK, e.g., "C:\Program Files (x86)\Windows Kits\10" or,
/// if multiple 10 versions are installed, set it directly to the correct bin directory "C:\Program Files (x86)\Windows Kits\10\bin\10.0.14393.0\x64"
///
/// If it is left unset, it will look up a path in the registry, i.e. HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots
sdk_dir: Option<PathBuf>,
/// A string containing an [application manifest] to be included with the application on Windows.
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// ```
///
/// [application manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
app_manifest: Option<String>,
}
impl WindowsAttributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
/// It must be in `ico` format. Defaults to `icons/icon.ico`.
#[must_use]
pub fn window_icon_path<P: AsRef<Path>>(mut self, window_icon_path: P) -> Self {
self
.window_icon_path
.replace(window_icon_path.as_ref().into());
self
}
/// Sets the sdk dir for windows. Currently only used on Windows. This must be a valid UTF-8
/// path. Defaults to whatever the `winres` crate determines is best.
#[must_use]
pub fn sdk_dir<P: AsRef<Path>>(mut self, sdk_dir: P) -> Self {
self.sdk_dir = Some(sdk_dir.as_ref().into());
self
}
/// Sets the Windows app [manifest].
///
/// # Example
///
/// The following manifest will brand the exe as requesting administrator privileges.
/// Thus, every time it is executed, a Windows UAC dialog will appear.
///
/// Note that you can move the manifest contents to a separate file and use `include_str!("manifest.xml")`
/// instead of the inline string.
///
/// ```rust,no_run
/// let mut windows = tauri_build::WindowsAttributes::new();
/// windows = windows.app_manifest(r#"
/// <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
/// <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
/// <security>
/// <requestedPrivileges>
/// <requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
/// </requestedPrivileges>
/// </security>
/// </trustInfo>
/// </assembly>
/// "#);
/// tauri_build::try_build(
/// tauri_build::Attributes::new().windows_attributes(windows)
/// ).expect("failed to run build script");
/// ```
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// [manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
/// ```
#[must_use]
pub fn app_manifest<S: AsRef<str>>(mut self, manifest: S) -> Self {
self.app_manifest = Some(manifest.as_ref().to_string());
self
}
}
/// The attributes used on the build.
#[derive(Debug, Default)]
pub struct Attributes {
#[allow(dead_code)]
windows_attributes: WindowsAttributes,
}
impl Attributes {
/// Creates the default attribute set.
pub fn new() -> Self |
/// Sets the icon to use on the window. Currently only used on Windows.
#[must_use]
pub fn windows_attributes(mut self, windows_attributes: WindowsAttributes) -> Self {
self.windows_attributes = windows_attributes;
self
}
}
/// Run all build time helpers for your Tauri Application.
///
/// The current helpers include the following:
/// * Generates a Windows Resource file when targeting Windows.
///
/// # Platforms
///
/// [`build()`] should be called inside of `build.rs` regardless of the platform:
/// * New helpers may target more platforms in the future.
/// * Platform specific code is handled by the helpers automatically.
/// * A build script is required in order to activate some cargo environment variables that are
/// used when generating code and embedding assets - so [`build()`] may as well be called.
///
/// In short, this is saying don't put the call to [`build()`] behind a `#[cfg(windows)]`.
///
/// # Panics
///
/// If any of the build time helpers fail, they will [`std::panic!`] with the related error message.
/// This is typically desirable when running inside a build script; see [`try_build`] for no panics.
pub fn build() {
if let Err(error) = try_build(Attributes::default()) {
let error = format!("{error:#}");
println!("{error}");
if error.starts_with("unknown field") {
print!("found an unknown configuration field. This usually means that you are using a CLI version that is newer than `tauri-build` and is incompatible. ");
println!(
"Please try updating the Rust crates by running `cargo update` in the Tauri app folder."
);
}
std::process::exit(1);
}
}
/// Non-panicking [`build()`].
#[allow(unused_variables)]
pub fn try_build(attributes: Attributes) -> Result<()> {
use anyhow::anyhow;
println!("cargo:rerun-if-env-changed=TAURI_CONFIG");
println!("cargo:rerun-if-changed=tauri.conf.json");
#[cfg(feature = "config-json5")]
println!("cargo:rerun-if-changed=tauri.conf.json5");
#[cfg(feature = "config-toml")]
println!("cargo:rerun-if-changed=Tauri.toml");
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();
let mobile = target_os == "ios" || target_os == "android";
cfg_alias("desktop",!mobile);
cfg_alias("mobile", mobile);
let mut config = serde_json::from_value(tauri_utils::config::parse::read_from(
std::env::current_dir().unwrap(),
)?)?;
if let Ok(env) = std::env::var("TAURI_CONFIG") {
let merge_config: serde_json::Value = serde_json::from_str(&env)?;
json_patch::merge(&mut config, &merge_config);
}
let config: Config = serde_json::from_value(config)?;
cfg_alias("dev",!has_feature("custom-protocol"));
let ws_path = get_workspace_dir()?;
let mut manifest =
Manifest::<cargo_toml::Value>::from_slice_with_metadata(&std::fs::read("Cargo.toml")?)?;
if let Ok(ws_manifest) = Manifest::from_path(ws_path.join("Cargo.toml")) {
Manifest::complete_from_path_and_workspace(
&mut manifest,
Path::new("Cargo.toml"),
Some((&ws_manifest, ws_path.as_path())),
)?;
} else {
Manifest::complete_from_path(&mut manifest, Path::new("Cargo.toml"))?;
}
if let Some(tauri_build) = manifest.build_dependencies.remove("tauri-build") {
let error_message = check_features(&config, tauri_build, true);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri-build` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
if let Some(tauri) = manifest.dependencies.remove("tauri") {
let error_message = check_features(&config, tauri, false);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
let target_triple = std::env::var("TARGET").unwrap();
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
// TODO: far from ideal, but there's no other way to get the target dir, see <https://github.com/rust-lang/cargo/issues/5457>
let target_dir = out_dir
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
if let Some(paths) = &config.tauri.bundle.external_bin {
copy_binaries(
ResourcePaths::new(external_binaries(paths, &target_triple).as_slice(), true),
&target_triple,
target_dir,
manifest.package.as_ref().map(|p| &p.name),
)?;
}
#[allow(unused_mut, clippy::redundant_clone)]
let mut resources = config.tauri.bundle.resources.clone().unwrap_or_default();
if target_triple.contains("windows") {
if let Some(fixed_webview2_runtime_path) =
&config.tauri.bundle.windows.webview_fixed_runtime_path
{
resources.push(fixed_webview2_runtime_path.display().to_string());
}
}
copy_resources(ResourcePaths::new(resources.as_slice(), true), target_dir)?;
if target_triple.contains("darwin") {
if let Some(version) = &config.tauri.bundle.macos.minimum_system_version {
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET={}", version);
}
}
if target_triple.contains("windows") {
use anyhow::Context;
use semver::Version;
use tauri_winres::{VersionInfo, WindowsResource};
fn find_icon<F: Fn(&&String) -> bool>(config: &Config, predicate: F, default: &str) -> PathBuf {
let icon_path = config
.tauri
.bundle
.icon
.iter()
.find(|i| predicate(i))
.cloned()
.unwrap_or_else(|| default.to_string());
icon_path.into()
}
let window_icon_path = attributes
.windows_attributes
.window_icon_path
.unwrap_or_else(|| find_icon(&config, |i| i.ends_with(".ico"), "icons/icon.ico"));
if window_icon_path.exists() {
let mut res = WindowsResource::new();
if let Some(manifest) = attributes.windows_attributes.app_manifest {
res.set_manifest(&manifest);
} else {
res.set_manifest(include_str!("window-app-manifest.xml"));
}
if let Some(sdk_dir) = &attributes.windows_attributes.sdk_dir {
if let Some(sdk_dir_str) = sdk_dir.to_str() {
res.set_toolkit_path(sdk_dir_str);
} else {
return Err(anyhow!(
"sdk_dir path is not valid; only UTF-8 characters are allowed"
));
}
}
if let Some(version) = &config.package.version {
if let Ok(v) = Version::parse(version) {
let version = v.major << 48 | v.minor << 32 | v.patch << 16;
res.set_version_info(VersionInfo::FILEVERSION, version);
res.set_version_info(VersionInfo::PRODUCTVERSION, version);
}
res.set("FileVersion", version);
res.set("ProductVersion", version);
}
if let Some(product_name) = &config.package.product_name {
res.set("ProductName", product_name);
res.set("FileDescription", product_name);
}
res.set_icon_with_id(&window_icon_path.display().to_string(), "32512");
res.compile().with_context(|| {
format!(
"failed to compile `{}` into a Windows Resource file during tauri-build",
window_icon_path.display()
)
})?;
} else {
return Err(anyhow!(format!(
"`{}` not found; required for generating a Windows Resource file during tauri-build",
window_icon_path.display()
)));
}
let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap();
match target_env.as_str() {
"gnu" => {
let target_arch = match std::env::var("CARGO_CFG_TARGET_ARCH").unwrap().as_str() {
"x86_64" => Some("x64"),
"x86" => Some("x86"),
"aarch64" => Some("arm64"),
_ => None,
};
if let Some(target_arch) = target_arch {
for entry in std::fs::read_dir(target_dir.join("build"))? {
let path = entry?.path();
let webview2_loader_path = path
.join("out")
.join(target_arch)
.join("WebView2Loader.dll");
if path.to_string_lossy().contains("webview2-com-sys") && webview2_loader_path.exists()
{
std::fs::copy(webview2_loader_path, target_dir.join("WebView2Loader.dll"))?;
break;
}
}
}
}
"msvc" => {
if std::env::var("STATIC_VCRUNTIME").map_or(false, |v| v == "true") {
static_vcruntime::build();
}
}
_ => (),
}
}
Ok(())
}
#[derive(Debug, Default, PartialEq, Eq)]
struct Diff {
remove: Vec<String>,
add: Vec<String>,
}
fn features_diff(current: &[String], expected: &[String]) -> Diff {
let mut remove = Vec::new();
let mut add = Vec::new();
for feature in current {
if !expected.contains(feature) {
remove.push(feature.clone());
}
}
for feature in expected {
if !current.contains(feature) {
add.push(feature.clone());
}
}
Diff { remove, add }
}
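// e.g. (mirrors the tests below): features_diff(&["a".into(), "b".into()],
// &["a".into(), "c".into()]) yields Diff { remove: vec!["b"], add: vec!["c"] }.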
fn check_features(config: &Config, dependency: Dependency, is_tauri_build: bool) -> String {
use tauri_utils::config::{PatternKind, TauriConfig};
let features = match dependency {
Dependency::Simple(_) => Vec::new(),
Dependency::Detailed(dep) => dep.features,
Dependency::Inherited(dep) => dep.features,
};
let all_cli_managed_features = if is_tauri_build {
vec!["isolation"]
} else {
TauriConfig::all_features()
};
let expected = if is_tauri_build {
match config.tauri.pattern {
PatternKind::Isolation { .. } => vec!["isolation".to_string()],
_ => vec![],
}
} else {
config
.tauri
.features()
.into_iter()
.map(|f| f.to_string())
.collect::<Vec<String>>()
};
let diff = features_diff(
&features
.into_iter()
.filter(|f| all_cli_managed_features.contains(&f.as_str()))
.collect::<Vec<String>>(),
&expected,
);
let mut error_message = String::new();
if !diff.remove.is_empty() {
error_message.push_str("remove the `");
error_message.push_str(&diff.remove.join(", "));
error_message.push_str(if diff.remove.len() == 1 {
"` feature"
} else {
"` features"
});
if !diff.add.is_empty() {
error_message.push_str(" and ");
}
}
if !diff.add.is_empty() {
error_message.push_str("add the `");
error_message.push_str(&diff.add.join(", "));
error_message.push_str(if diff.add.len() == 1 {
"` feature"
} else {
"` features"
});
}
error_message
}
#[derive(serde::Deserialize)]
struct CargoMetadata {
workspace_root: PathBuf,
}
fn get_workspace_dir() -> Result<PathBuf> {
let output = std::process::Command::new("cargo")
.args(["metadata", "--no-deps", "--format-version", "1"])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"cargo metadata command exited with a non zero exit code: {}",
String::from_utf8(output.stderr)?
));
}
Ok(serde_json::from_slice::<CargoMetadata>(&output.stdout)?.workspace_root)
}
#[cfg(test)]
mod tests {
use super::Diff;
#[test]
fn array_diff() {
for (current, expected, result) in [
(vec![], vec![], Default::default()),
(
vec!["a".into()],
vec![],
Diff {
remove: vec!["a".into()],
add: vec![],
},
),
(vec!["a".into()], vec!["a".into()], Default::default()),
(
vec!["a".into(), "b".into()],
vec!["a".into()],
Diff {
remove: vec!["b".into()],
add: vec![],
},
),
(
vec!["a".into(), "b".into()],
vec!["a".into(), "c".into()],
Diff {
remove: vec!["b".into()],
add: vec!["c".into()],
},
),
] {
assert_eq!(super::features_diff(&current, &expected), result);
}
}
}
| {
Self::default()
} | identifier_body |
lib.rs | // Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
#![cfg_attr(doc_cfg, feature(doc_cfg))]
pub use anyhow::Result;
use cargo_toml::{Dependency, Manifest};
use heck::AsShoutySnakeCase;
use tauri_utils::{
config::Config,
resources::{external_binaries, resource_relpath, ResourcePaths},
};
use std::path::{Path, PathBuf};
#[cfg(feature = "codegen")]
mod codegen;
mod static_vcruntime;
#[cfg(feature = "codegen")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "codegen")))]
pub use codegen::context::CodegenContext;
fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
let from = from.as_ref();
let to = to.as_ref();
if !from.exists() {
return Err(anyhow::anyhow!("{:?} does not exist", from));
}
if !from.is_file() {
return Err(anyhow::anyhow!("{:?} is not a file", from));
}
let dest_dir = to.parent().expect("No data in parent");
std::fs::create_dir_all(dest_dir)?;
std::fs::copy(from, to)?;
Ok(())
}
fn copy_binaries(
binaries: ResourcePaths,
target_triple: &str,
path: &Path,
package_name: Option<&String>,
) -> Result<()> {
for src in binaries {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let file_name = src
.file_name()
.expect("failed to extract external binary filename")
.to_string_lossy()
.replace(&format!("-{target_triple}"), "");
if package_name.map_or(false, |n| n == &file_name) {
return Err(anyhow::anyhow!(
"Cannot define a sidecar with the same name as the Cargo package name `{}`. Please change the sidecar name in the filesystem and the Tauri configuration.",
file_name
));
}
let dest = path.join(file_name);
if dest.exists() {
std::fs::remove_file(&dest).unwrap();
}
copy_file(&src, &dest)?;
}
Ok(())
}
/// Copies resources to a path.
fn copy_resources(resources: ResourcePaths<'_>, path: &Path) -> Result<()> {
for src in resources {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let dest = path.join(resource_relpath(&src));
copy_file(&src, dest)?;
}
Ok(())
}
// checks if the given Cargo feature is enabled.
fn has_feature(feature: &str) -> bool {
// when a feature is enabled, Cargo sets the `CARGO_FEATURE_<name>` env var to 1
// https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
std::env::var(format!("CARGO_FEATURE_{}", AsShoutySnakeCase(feature)))
.map(|x| x == "1")
.unwrap_or(false)
}
// creates a cfg alias if `has_feature` is true.
// `alias` must be a snake case string.
fn cfg_alias(alias: &str, has_feature: bool) {
if has_feature {
println!("cargo:rustc-cfg={alias}");
}
}
/// Attributes used on Windows.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct WindowsAttributes {
window_icon_path: Option<PathBuf>,
/// The path to the sdk location.
///
/// For the GNU toolkit this has to be the path where MinGW put windres.exe and ar.exe.
/// This could be something like: "C:\Program Files\mingw-w64\x86_64-5.3.0-win32-seh-rt_v4-rev0\mingw64\bin"
///
/// For MSVC the Windows SDK has to be installed. It comes with the resource compiler rc.exe.
/// This should be set to the root directory of the Windows SDK, e.g., "C:\Program Files (x86)\Windows Kits\10" or,
/// if multiple 10 versions are installed, set it directly to the correct bin directory "C:\Program Files (x86)\Windows Kits\10\bin\10.0.14393.0\x64"
///
/// If it is left unset, it will look up a path in the registry, i.e. HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots
sdk_dir: Option<PathBuf>,
/// A string containing an [application manifest] to be included with the application on Windows.
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// ```
///
/// [application manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
app_manifest: Option<String>,
}
impl WindowsAttributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
/// It must be in `ico` format. Defaults to `icons/icon.ico`.
#[must_use]
pub fn window_icon_path<P: AsRef<Path>>(mut self, window_icon_path: P) -> Self {
self
.window_icon_path
.replace(window_icon_path.as_ref().into());
self
}
/// Sets the sdk dir for windows. Currently only used on Windows. This must be a valid UTF-8
/// path. Defaults to whatever the `winres` crate determines is best.
#[must_use]
pub fn sdk_dir<P: AsRef<Path>>(mut self, sdk_dir: P) -> Self {
self.sdk_dir = Some(sdk_dir.as_ref().into());
self
}
/// Sets the Windows app [manifest].
///
/// # Example
///
/// The following manifest will brand the exe as requesting administrator privileges.
/// Thus, every time it is executed, a Windows UAC dialog will appear.
///
/// Note that you can move the manifest contents to a separate file and use `include_str!("manifest.xml")`
/// instead of the inline string.
///
/// ```rust,no_run
/// let mut windows = tauri_build::WindowsAttributes::new();
/// windows = windows.app_manifest(r#"
/// <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
/// <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
/// <security>
/// <requestedPrivileges>
/// <requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
/// </requestedPrivileges>
/// </security>
/// </trustInfo>
/// </assembly>
/// "#);
/// tauri_build::try_build(
/// tauri_build::Attributes::new().windows_attributes(windows)
/// ).expect("failed to run build script");
/// ```
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// [manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
/// ```
#[must_use]
pub fn app_manifest<S: AsRef<str>>(mut self, manifest: S) -> Self {
self.app_manifest = Some(manifest.as_ref().to_string());
self
}
}
/// The attributes used on the build.
#[derive(Debug, Default)]
pub struct Attributes {
#[allow(dead_code)]
windows_attributes: WindowsAttributes,
}
impl Attributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
#[must_use]
pub fn | (mut self, windows_attributes: WindowsAttributes) -> Self {
self.windows_attributes = windows_attributes;
self
}
}
/// Run all build time helpers for your Tauri Application.
///
/// The current helpers include the following:
/// * Generates a Windows Resource file when targeting Windows.
///
/// # Platforms
///
/// [`build()`] should be called inside of `build.rs` regardless of the platform:
/// * New helpers may target more platforms in the future.
/// * Platform specific code is handled by the helpers automatically.
/// * A build script is required in order to activate some cargo environment variables that are
/// used when generating code and embedding assets - so [`build()`] may as well be called.
///
/// In short, this is saying don't put the call to [`build()`] behind a `#[cfg(windows)]`.
///
/// # Panics
///
/// If any of the build time helpers fail, they will [`std::panic!`] with the related error message.
/// This is typically desirable when running inside a build script; see [`try_build`] for no panics.
pub fn build() {
if let Err(error) = try_build(Attributes::default()) {
let error = format!("{error:#}");
println!("{error}");
if error.starts_with("unknown field") {
print!("found an unknown configuration field. This usually means that you are using a CLI version that is newer than `tauri-build` and is incompatible. ");
println!(
"Please try updating the Rust crates by running `cargo update` in the Tauri app folder."
);
}
std::process::exit(1);
}
}
/// Non-panicking [`build()`].
#[allow(unused_variables)]
pub fn try_build(attributes: Attributes) -> Result<()> {
use anyhow::anyhow;
println!("cargo:rerun-if-env-changed=TAURI_CONFIG");
println!("cargo:rerun-if-changed=tauri.conf.json");
#[cfg(feature = "config-json5")]
println!("cargo:rerun-if-changed=tauri.conf.json5");
#[cfg(feature = "config-toml")]
println!("cargo:rerun-if-changed=Tauri.toml");
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();
let mobile = target_os == "ios" || target_os == "android";
cfg_alias("desktop",!mobile);
cfg_alias("mobile", mobile);
let mut config = serde_json::from_value(tauri_utils::config::parse::read_from(
std::env::current_dir().unwrap(),
)?)?;
if let Ok(env) = std::env::var("TAURI_CONFIG") {
let merge_config: serde_json::Value = serde_json::from_str(&env)?;
json_patch::merge(&mut config, &merge_config);
}
let config: Config = serde_json::from_value(config)?;
cfg_alias("dev",!has_feature("custom-protocol"));
let ws_path = get_workspace_dir()?;
let mut manifest =
Manifest::<cargo_toml::Value>::from_slice_with_metadata(&std::fs::read("Cargo.toml")?)?;
if let Ok(ws_manifest) = Manifest::from_path(ws_path.join("Cargo.toml")) {
Manifest::complete_from_path_and_workspace(
&mut manifest,
Path::new("Cargo.toml"),
Some((&ws_manifest, ws_path.as_path())),
)?;
} else {
Manifest::complete_from_path(&mut manifest, Path::new("Cargo.toml"))?;
}
if let Some(tauri_build) = manifest.build_dependencies.remove("tauri-build") {
let error_message = check_features(&config, tauri_build, true);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri-build` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
if let Some(tauri) = manifest.dependencies.remove("tauri") {
let error_message = check_features(&config, tauri, false);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
let target_triple = std::env::var("TARGET").unwrap();
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
// TODO: far from ideal, but there's no other way to get the target dir, see <https://github.com/rust-lang/cargo/issues/5457>
let target_dir = out_dir
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
if let Some(paths) = &config.tauri.bundle.external_bin {
copy_binaries(
ResourcePaths::new(external_binaries(paths, &target_triple).as_slice(), true),
&target_triple,
target_dir,
manifest.package.as_ref().map(|p| &p.name),
)?;
}
#[allow(unused_mut, clippy::redundant_clone)]
let mut resources = config.tauri.bundle.resources.clone().unwrap_or_default();
if target_triple.contains("windows") {
if let Some(fixed_webview2_runtime_path) =
&config.tauri.bundle.windows.webview_fixed_runtime_path
{
resources.push(fixed_webview2_runtime_path.display().to_string());
}
}
copy_resources(ResourcePaths::new(resources.as_slice(), true), target_dir)?;
if target_triple.contains("darwin") {
if let Some(version) = &config.tauri.bundle.macos.minimum_system_version {
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET={}", version);
}
}
if target_triple.contains("windows") {
use anyhow::Context;
use semver::Version;
use tauri_winres::{VersionInfo, WindowsResource};
fn find_icon<F: Fn(&&String) -> bool>(config: &Config, predicate: F, default: &str) -> PathBuf {
let icon_path = config
.tauri
.bundle
.icon
.iter()
.find(|i| predicate(i))
.cloned()
.unwrap_or_else(|| default.to_string());
icon_path.into()
}
let window_icon_path = attributes
.windows_attributes
.window_icon_path
.unwrap_or_else(|| find_icon(&config, |i| i.ends_with(".ico"), "icons/icon.ico"));
if window_icon_path.exists() {
let mut res = WindowsResource::new();
if let Some(manifest) = attributes.windows_attributes.app_manifest {
res.set_manifest(&manifest);
} else {
res.set_manifest(include_str!("window-app-manifest.xml"));
}
if let Some(sdk_dir) = &attributes.windows_attributes.sdk_dir {
if let Some(sdk_dir_str) = sdk_dir.to_str() {
res.set_toolkit_path(sdk_dir_str);
} else {
return Err(anyhow!(
"sdk_dir path is not valid; only UTF-8 characters are allowed"
));
}
}
if let Some(version) = &config.package.version {
if let Ok(v) = Version::parse(version) {
let version = v.major << 48 | v.minor << 32 | v.patch << 16;
res.set_version_info(VersionInfo::FILEVERSION, version);
res.set_version_info(VersionInfo::PRODUCTVERSION, version);
}
res.set("FileVersion", version);
res.set("ProductVersion", version);
}
if let Some(product_name) = &config.package.product_name {
res.set("ProductName", product_name);
res.set("FileDescription", product_name);
}
res.set_icon_with_id(&window_icon_path.display().to_string(), "32512");
res.compile().with_context(|| {
format!(
"failed to compile `{}` into a Windows Resource file during tauri-build",
window_icon_path.display()
)
})?;
} else {
return Err(anyhow!(format!(
"`{}` not found; required for generating a Windows Resource file during tauri-build",
window_icon_path.display()
)));
}
let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap();
match target_env.as_str() {
"gnu" => {
let target_arch = match std::env::var("CARGO_CFG_TARGET_ARCH").unwrap().as_str() {
"x86_64" => Some("x64"),
"x86" => Some("x86"),
"aarch64" => Some("arm64"),
_ => None,
};
if let Some(target_arch) = target_arch {
for entry in std::fs::read_dir(target_dir.join("build"))? {
let path = entry?.path();
let webview2_loader_path = path
.join("out")
.join(target_arch)
.join("WebView2Loader.dll");
if path.to_string_lossy().contains("webview2-com-sys") && webview2_loader_path.exists()
{
std::fs::copy(webview2_loader_path, target_dir.join("WebView2Loader.dll"))?;
break;
}
}
}
}
"msvc" => {
if std::env::var("STATIC_VCRUNTIME").map_or(false, |v| v == "true") {
static_vcruntime::build();
}
}
_ => (),
}
}
Ok(())
}
#[derive(Debug, Default, PartialEq, Eq)]
struct Diff {
remove: Vec<String>,
add: Vec<String>,
}
fn features_diff(current: &[String], expected: &[String]) -> Diff {
let mut remove = Vec::new();
let mut add = Vec::new();
for feature in current {
if !expected.contains(feature) {
remove.push(feature.clone());
}
}
for feature in expected {
if !current.contains(feature) {
add.push(feature.clone());
}
}
Diff { remove, add }
}
fn check_features(config: &Config, dependency: Dependency, is_tauri_build: bool) -> String {
use tauri_utils::config::{PatternKind, TauriConfig};
let features = match dependency {
Dependency::Simple(_) => Vec::new(),
Dependency::Detailed(dep) => dep.features,
Dependency::Inherited(dep) => dep.features,
};
let all_cli_managed_features = if is_tauri_build {
vec!["isolation"]
} else {
TauriConfig::all_features()
};
let expected = if is_tauri_build {
match config.tauri.pattern {
PatternKind::Isolation { .. } => vec!["isolation".to_string()],
_ => vec![],
}
} else {
config
.tauri
.features()
.into_iter()
.map(|f| f.to_string())
.collect::<Vec<String>>()
};
let diff = features_diff(
&features
.into_iter()
.filter(|f| all_cli_managed_features.contains(&f.as_str()))
.collect::<Vec<String>>(),
&expected,
);
let mut error_message = String::new();
if !diff.remove.is_empty() {
error_message.push_str("remove the `");
error_message.push_str(&diff.remove.join(", "));
error_message.push_str(if diff.remove.len() == 1 {
"` feature"
} else {
"` features"
});
if !diff.add.is_empty() {
error_message.push_str(" and ");
}
}
if !diff.add.is_empty() {
error_message.push_str("add the `");
error_message.push_str(&diff.add.join(", "));
error_message.push_str(if diff.add.len() == 1 {
"` feature"
} else {
"` features"
});
}
error_message
}
#[derive(serde::Deserialize)]
struct CargoMetadata {
workspace_root: PathBuf,
}
fn get_workspace_dir() -> Result<PathBuf> {
let output = std::process::Command::new("cargo")
.args(["metadata", "--no-deps", "--format-version", "1"])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"cargo metadata command exited with a non zero exit code: {}",
String::from_utf8(output.stderr)?
));
}
Ok(serde_json::from_slice::<CargoMetadata>(&output.stdout)?.workspace_root)
}
#[cfg(test)]
mod tests {
use super::Diff;
#[test]
fn array_diff() {
for (current, expected, result) in [
(vec![], vec![], Default::default()),
(
vec!["a".into()],
vec![],
Diff {
remove: vec!["a".into()],
add: vec![],
},
),
(vec!["a".into()], vec!["a".into()], Default::default()),
(
vec!["a".into(), "b".into()],
vec!["a".into()],
Diff {
remove: vec!["b".into()],
add: vec![],
},
),
(
vec!["a".into(), "b".into()],
vec!["a".into(), "c".into()],
Diff {
remove: vec!["b".into()],
add: vec!["c".into()],
},
),
] {
assert_eq!(super::features_diff(&current, &expected), result);
}
}
}
| windows_attributes | identifier_name |
lib.rs | // Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
#![cfg_attr(doc_cfg, feature(doc_cfg))]
pub use anyhow::Result;
use cargo_toml::{Dependency, Manifest};
use heck::AsShoutySnakeCase;
use tauri_utils::{
config::Config,
resources::{external_binaries, resource_relpath, ResourcePaths},
};
use std::path::{Path, PathBuf};
#[cfg(feature = "codegen")]
mod codegen;
mod static_vcruntime;
#[cfg(feature = "codegen")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "codegen")))]
pub use codegen::context::CodegenContext;
fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
let from = from.as_ref();
let to = to.as_ref();
if !from.exists() {
return Err(anyhow::anyhow!("{:?} does not exist", from));
}
if !from.is_file() {
return Err(anyhow::anyhow!("{:?} is not a file", from));
}
let dest_dir = to.parent().expect("No data in parent");
std::fs::create_dir_all(dest_dir)?;
std::fs::copy(from, to)?;
Ok(())
}
fn copy_binaries(
binaries: ResourcePaths,
target_triple: &str,
path: &Path,
package_name: Option<&String>,
) -> Result<()> {
for src in binaries {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let file_name = src
.file_name()
.expect("failed to extract external binary filename")
.to_string_lossy()
.replace(&format!("-{target_triple}"), "");
if package_name.map_or(false, |n| n == &file_name) {
return Err(anyhow::anyhow!(
"Cannot define a sidecar with the same name as the Cargo package name `{}`. Please change the sidecar name in the filesystem and the Tauri configuration.",
file_name
));
}
let dest = path.join(file_name);
if dest.exists() {
std::fs::remove_file(&dest).unwrap();
}
copy_file(&src, &dest)?;
}
Ok(())
}
/// Copies resources to a path.
fn copy_resources(resources: ResourcePaths<'_>, path: &Path) -> Result<()> {
for src in resources {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let dest = path.join(resource_relpath(&src));
copy_file(&src, dest)?;
}
Ok(())
}
// checks if the given Cargo feature is enabled.
fn has_feature(feature: &str) -> bool {
// when a feature is enabled, Cargo sets the `CARGO_FEATURE_<name>` env var to 1
// https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
std::env::var(format!("CARGO_FEATURE_{}", AsShoutySnakeCase(feature)))
.map(|x| x == "1")
.unwrap_or(false)
}
// creates a cfg alias if `has_feature` is true.
// `alias` must be a snake case string.
fn cfg_alias(alias: &str, has_feature: bool) {
if has_feature {
println!("cargo:rustc-cfg={alias}");
}
}
/// Attributes used on Windows.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct WindowsAttributes {
window_icon_path: Option<PathBuf>,
/// The path to the sdk location.
///
/// For the GNU toolkit this has to be the path where MinGW put windres.exe and ar.exe.
/// This could be something like: "C:\Program Files\mingw-w64\x86_64-5.3.0-win32-seh-rt_v4-rev0\mingw64\bin"
///
/// For MSVC the Windows SDK has to be installed. It comes with the resource compiler rc.exe.
/// This should be set to the root directory of the Windows SDK, e.g., "C:\Program Files (x86)\Windows Kits\10" or,
/// if multiple 10 versions are installed, set it directly to the correct bin directory "C:\Program Files (x86)\Windows Kits\10\bin\10.0.14393.0\x64"
///
/// If it is left unset, it will look up a path in the registry, i.e. HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots
sdk_dir: Option<PathBuf>,
/// A string containing an [application manifest] to be included with the application on Windows.
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// ```
///
/// [application manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
app_manifest: Option<String>,
}
impl WindowsAttributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
/// It must be in `ico` format. Defaults to `icons/icon.ico`.
#[must_use]
pub fn window_icon_path<P: AsRef<Path>>(mut self, window_icon_path: P) -> Self {
self
.window_icon_path
.replace(window_icon_path.as_ref().into());
self
}
/// Sets the sdk dir for windows. Currently only used on Windows. This must be a valid UTF-8
/// path. Defaults to whatever the `winres` crate determines is best.
#[must_use]
pub fn sdk_dir<P: AsRef<Path>>(mut self, sdk_dir: P) -> Self {
self.sdk_dir = Some(sdk_dir.as_ref().into());
self
}
/// Sets the Windows app [manifest].
///
/// # Example
///
/// The following manifest will brand the exe as requesting administrator privileges.
/// Thus, every time it is executed, a Windows UAC dialog will appear.
///
/// Note that you can move the manifest contents to a separate file and use `include_str!("manifest.xml")`
/// instead of the inline string.
///
/// ```rust,no_run
/// let mut windows = tauri_build::WindowsAttributes::new();
/// windows = windows.app_manifest(r#"
/// <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
/// <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
/// <security>
/// <requestedPrivileges>
/// <requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
/// </requestedPrivileges>
/// </security>
/// </trustInfo>
/// </assembly>
/// "#);
/// tauri_build::try_build(
/// tauri_build::Attributes::new().windows_attributes(windows)
/// ).expect("failed to run build script");
/// ```
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// [manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
/// ```
#[must_use]
pub fn app_manifest<S: AsRef<str>>(mut self, manifest: S) -> Self {
self.app_manifest = Some(manifest.as_ref().to_string());
self
}
}
/// The attributes used on the build.
#[derive(Debug, Default)]
pub struct Attributes {
#[allow(dead_code)]
windows_attributes: WindowsAttributes,
}
impl Attributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
#[must_use]
pub fn windows_attributes(mut self, windows_attributes: WindowsAttributes) -> Self {
self.windows_attributes = windows_attributes;
self
}
}
/// Run all build time helpers for your Tauri Application.
///
/// The current helpers include the following:
/// * Generates a Windows Resource file when targeting Windows.
///
/// # Platforms
///
/// [`build()`] should be called inside of `build.rs` regardless of the platform:
/// * New helpers may target more platforms in the future.
/// * Platform specific code is handled by the helpers automatically.
/// * A build script is required in order to activate some cargo environment variables that are
/// used when generating code and embedding assets - so [`build()`] may as well be called.
///
/// In short, this is saying don't put the call to [`build()`] behind a `#[cfg(windows)]`.
///
/// # Panics
///
/// If any of the build time helpers fail, they will [`std::panic!`] with the related error message.
/// This is typically desirable when running inside a build script; see [`try_build`] for no panics.
pub fn build() {
if let Err(error) = try_build(Attributes::default()) {
let error = format!("{error:#}");
println!("{error}");
if error.starts_with("unknown field") {
print!("found an unknown configuration field. This usually means that you are using a CLI version that is newer than `tauri-build` and is incompatible. ");
println!(
"Please try updating the Rust crates by running `cargo update` in the Tauri app folder."
);
}
std::process::exit(1);
}
}
/// Non-panicking [`build()`].
#[allow(unused_variables)]
pub fn try_build(attributes: Attributes) -> Result<()> {
use anyhow::anyhow;
println!("cargo:rerun-if-env-changed=TAURI_CONFIG");
println!("cargo:rerun-if-changed=tauri.conf.json");
#[cfg(feature = "config-json5")]
println!("cargo:rerun-if-changed=tauri.conf.json5");
#[cfg(feature = "config-toml")]
println!("cargo:rerun-if-changed=Tauri.toml");
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();
let mobile = target_os == "ios" || target_os == "android";
cfg_alias("desktop",!mobile);
cfg_alias("mobile", mobile);
let mut config = serde_json::from_value(tauri_utils::config::parse::read_from(
std::env::current_dir().unwrap(),
)?)?;
if let Ok(env) = std::env::var("TAURI_CONFIG") {
let merge_config: serde_json::Value = serde_json::from_str(&env)?;
json_patch::merge(&mut config, &merge_config);
}
let config: Config = serde_json::from_value(config)?;
cfg_alias("dev",!has_feature("custom-protocol"));
let ws_path = get_workspace_dir()?;
let mut manifest =
Manifest::<cargo_toml::Value>::from_slice_with_metadata(&std::fs::read("Cargo.toml")?)?;
if let Ok(ws_manifest) = Manifest::from_path(ws_path.join("Cargo.toml")) {
Manifest::complete_from_path_and_workspace(
&mut manifest,
Path::new("Cargo.toml"),
Some((&ws_manifest, ws_path.as_path())),
)?;
} else {
Manifest::complete_from_path(&mut manifest, Path::new("Cargo.toml"))?;
}
if let Some(tauri_build) = manifest.build_dependencies.remove("tauri-build") {
let error_message = check_features(&config, tauri_build, true);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri-build` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
if let Some(tauri) = manifest.dependencies.remove("tauri") {
let error_message = check_features(&config, tauri, false);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri` dependency features on the `Cargo.toml` file does not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
let target_triple = std::env::var("TARGET").unwrap();
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
// TODO: far from ideal, but there's no other way to get the target dir, see <https://github.com/rust-lang/cargo/issues/5457>
let target_dir = out_dir
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
if let Some(paths) = &config.tauri.bundle.external_bin {
copy_binaries(
ResourcePaths::new(external_binaries(paths, &target_triple).as_slice(), true),
&target_triple,
target_dir,
manifest.package.as_ref().map(|p| &p.name),
)?;
}
#[allow(unused_mut, clippy::redundant_clone)]
let mut resources = config.tauri.bundle.resources.clone().unwrap_or_default();
if target_triple.contains("windows") {
if let Some(fixed_webview2_runtime_path) =
&config.tauri.bundle.windows.webview_fixed_runtime_path
{
resources.push(fixed_webview2_runtime_path.display().to_string());
}
}
copy_resources(ResourcePaths::new(resources.as_slice(), true), target_dir)?;
if target_triple.contains("darwin") {
if let Some(version) = &config.tauri.bundle.macos.minimum_system_version {
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET={}", version);
}
}
if target_triple.contains("windows") {
use anyhow::Context;
use semver::Version;
use tauri_winres::{VersionInfo, WindowsResource};
fn find_icon<F: Fn(&&String) -> bool>(config: &Config, predicate: F, default: &str) -> PathBuf {
let icon_path = config
.tauri
.bundle
.icon
.iter()
.find(|i| predicate(i))
.cloned()
.unwrap_or_else(|| default.to_string());
icon_path.into()
}
let window_icon_path = attributes
.windows_attributes
.window_icon_path
.unwrap_or_else(|| find_icon(&config, |i| i.ends_with(".ico"), "icons/icon.ico"));
if window_icon_path.exists() {
let mut res = WindowsResource::new();
if let Some(manifest) = attributes.windows_attributes.app_manifest {
res.set_manifest(&manifest);
} else {
res.set_manifest(include_str!("window-app-manifest.xml"));
}
if let Some(sdk_dir) = &attributes.windows_attributes.sdk_dir {
if let Some(sdk_dir_str) = sdk_dir.to_str() {
res.set_toolkit_path(sdk_dir_str);
} else {
return Err(anyhow!(
"sdk_dir path is not valid; only UTF-8 characters are allowed"
));
}
}
if let Some(version) = &config.package.version {
if let Ok(v) = Version::parse(version) {
let version = v.major << 48 | v.minor << 32 | v.patch << 16;
res.set_version_info(VersionInfo::FILEVERSION, version);
res.set_version_info(VersionInfo::PRODUCTVERSION, version);
}
res.set("FileVersion", version);
res.set("ProductVersion", version);
}
if let Some(product_name) = &config.package.product_name {
res.set("ProductName", product_name);
res.set("FileDescription", product_name);
}
res.set_icon_with_id(&window_icon_path.display().to_string(), "32512");
res.compile().with_context(|| {
format!(
"failed to compile `{}` into a Windows Resource file during tauri-build",
window_icon_path.display()
)
})?;
} else {
return Err(anyhow!(format!(
"`{}` not found; required for generating a Windows Resource file during tauri-build",
window_icon_path.display()
)));
}
let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap();
match target_env.as_str() {
"gnu" => {
let target_arch = match std::env::var("CARGO_CFG_TARGET_ARCH").unwrap().as_str() {
"x86_64" => Some("x64"),
"x86" => Some("x86"),
"aarch64" => Some("arm64"),
_ => None,
};
if let Some(target_arch) = target_arch {
for entry in std::fs::read_dir(target_dir.join("build"))? {
let path = entry?.path();
let webview2_loader_path = path
.join("out")
.join(target_arch)
.join("WebView2Loader.dll");
if path.to_string_lossy().contains("webview2-com-sys") && webview2_loader_path.exists()
{
std::fs::copy(webview2_loader_path, target_dir.join("WebView2Loader.dll"))?;
break;
}
}
}
}
"msvc" => {
if std::env::var("STATIC_VCRUNTIME").map_or(false, |v| v == "true") {
static_vcruntime::build();
}
}
_ => (),
}
}
Ok(())
}
#[derive(Debug, Default, PartialEq, Eq)]
struct Diff {
remove: Vec<String>,
add: Vec<String>,
}
fn features_diff(current: &[String], expected: &[String]) -> Diff {
let mut remove = Vec::new();
let mut add = Vec::new();
for feature in current {
if !expected.contains(feature) {
remove.push(feature.clone());
}
}
for feature in expected {
if !current.contains(feature) {
add.push(feature.clone());
}
}
Diff { remove, add }
}
fn check_features(config: &Config, dependency: Dependency, is_tauri_build: bool) -> String {
use tauri_utils::config::{PatternKind, TauriConfig};
let features = match dependency {
Dependency::Simple(_) => Vec::new(),
Dependency::Detailed(dep) => dep.features,
Dependency::Inherited(dep) => dep.features,
};
let all_cli_managed_features = if is_tauri_build {
vec!["isolation"]
} else {
TauriConfig::all_features()
};
let expected = if is_tauri_build {
match config.tauri.pattern {
PatternKind::Isolation { .. } => vec!["isolation".to_string()],
_ => vec![],
}
} else | ;
let diff = features_diff(
&features
.into_iter()
.filter(|f| all_cli_managed_features.contains(&f.as_str()))
.collect::<Vec<String>>(),
&expected,
);
let mut error_message = String::new();
if !diff.remove.is_empty() {
error_message.push_str("remove the `");
error_message.push_str(&diff.remove.join(", "));
error_message.push_str(if diff.remove.len() == 1 {
"` feature"
} else {
"` features"
});
if !diff.add.is_empty() {
error_message.push_str(" and ");
}
}
if !diff.add.is_empty() {
error_message.push_str("add the `");
error_message.push_str(&diff.add.join(", "));
error_message.push_str(if diff.add.len() == 1 {
"` feature"
} else {
"` features"
});
}
error_message
}
#[derive(serde::Deserialize)]
struct CargoMetadata {
workspace_root: PathBuf,
}
fn get_workspace_dir() -> Result<PathBuf> {
let output = std::process::Command::new("cargo")
.args(["metadata", "--no-deps", "--format-version", "1"])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"cargo metadata command exited with a non zero exit code: {}",
String::from_utf8(output.stderr)?
));
}
Ok(serde_json::from_slice::<CargoMetadata>(&output.stdout)?.workspace_root)
}
#[cfg(test)]
mod tests {
use super::Diff;
#[test]
fn array_diff() {
for (current, expected, result) in [
(vec![], vec![], Default::default()),
(
vec!["a".into()],
vec![],
Diff {
remove: vec!["a".into()],
add: vec![],
},
),
(vec!["a".into()], vec!["a".into()], Default::default()),
(
vec!["a".into(), "b".into()],
vec!["a".into()],
Diff {
remove: vec!["b".into()],
add: vec![],
},
),
(
vec!["a".into(), "b".into()],
vec!["a".into(), "c".into()],
Diff {
remove: vec!["b".into()],
add: vec!["c".into()],
},
),
] {
assert_eq!(super::features_diff(&current, &expected), result);
}
}
}
| {
config
.tauri
.features()
.into_iter()
.map(|f| f.to_string())
.collect::<Vec<String>>()
} | conditional_block |
lib.rs | // Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
#![cfg_attr(doc_cfg, feature(doc_cfg))]
pub use anyhow::Result;
use cargo_toml::{Dependency, Manifest};
use heck::AsShoutySnakeCase;
use tauri_utils::{
config::Config,
resources::{external_binaries, resource_relpath, ResourcePaths},
};
use std::path::{Path, PathBuf};
#[cfg(feature = "codegen")]
mod codegen;
mod static_vcruntime;
#[cfg(feature = "codegen")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "codegen")))]
pub use codegen::context::CodegenContext;
fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
let from = from.as_ref();
let to = to.as_ref();
if !from.exists() {
return Err(anyhow::anyhow!("{:?} does not exist", from));
}
if !from.is_file() {
return Err(anyhow::anyhow!("{:?} is not a file", from));
}
let dest_dir = to.parent().expect("No data in parent");
std::fs::create_dir_all(dest_dir)?;
std::fs::copy(from, to)?;
Ok(())
}
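// Hedged usage sketch (added for illustration, not present in the original
// source): `copy_file` creates the destination's parent directories before
// copying, so a fresh nested path under the system temp dir works without any
// manual `create_dir_all` on the destination side. Paths are illustrative and
// the test leaves its scratch directory behind.
#[cfg(test)]
mod copy_file_example {
    #[test]
    fn copies_into_nested_destination() {
        let base = std::env::temp_dir().join("tauri-build-copy-file-example");
        std::fs::create_dir_all(&base).unwrap();
        let src = base.join("src.txt");
        std::fs::write(&src, b"payload").unwrap();
        // The nested directories below do not exist yet; `copy_file` creates them.
        let dest = base.join("nested").join("dir").join("dest.txt");
        super::copy_file(&src, &dest).unwrap();
        assert_eq!(std::fs::read(&dest).unwrap(), b"payload");
    }
}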
fn copy_binaries(
binaries: ResourcePaths,
target_triple: &str,
path: &Path,
package_name: Option<&String>,
) -> Result<()> {
for src in binaries {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let file_name = src
.file_name()
.expect("failed to extract external binary filename")
.to_string_lossy()
.replace(&format!("-{target_triple}"), "");
if package_name.map_or(false, |n| n == &file_name) {
return Err(anyhow::anyhow!(
"Cannot define a sidecar with the same name as the Cargo package name `{}`. Please change the sidecar name in the filesystem and the Tauri configuration.",
file_name
));
}
let dest = path.join(file_name);
if dest.exists() {
std::fs::remove_file(&dest).unwrap();
}
copy_file(&src, &dest)?;
}
Ok(())
}
/// Copies resources to a path.
fn copy_resources(resources: ResourcePaths<'_>, path: &Path) -> Result<()> {
for src in resources {
let src = src?;
println!("cargo:rerun-if-changed={}", src.display());
let dest = path.join(resource_relpath(&src));
copy_file(&src, dest)?;
}
Ok(())
}
// checks if the given Cargo feature is enabled.
fn has_feature(feature: &str) -> bool {
// when a feature is enabled, Cargo sets the `CARGO_FEATURE_<name>` env var to 1
// https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
std::env::var(format!("CARGO_FEATURE_{}", AsShoutySnakeCase(feature)))
.map(|x| x == "1")
.unwrap_or(false)
}
// creates a cfg alias if `has_feature` is true.
// `alias` must be a snake case string.
fn cfg_alias(alias: &str, has_feature: bool) {
if has_feature {
println!("cargo:rustc-cfg={alias}");
}
}
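// Hedged sketch, not part of the original source: how `has_feature` and
// `cfg_alias` cooperate inside a build script. Cargo exposes every enabled
// feature as a `CARGO_FEATURE_<NAME>` (SHOUTY_SNAKE_CASE) environment variable
// set to "1", and `cargo:rustc-cfg=<alias>` makes `#[cfg(<alias>)]` usable in
// the compiled crate. The feature name below is illustrative, and `set_var`
// is process-global, so this is only suitable as an isolated example.
#[cfg(test)]
mod feature_alias_example {
    #[test]
    fn feature_env_var_convention() {
        // Simulate Cargo enabling the `custom-protocol` feature for this process.
        std::env::set_var("CARGO_FEATURE_CUSTOM_PROTOCOL", "1");
        assert!(super::has_feature("custom-protocol"));
        // `try_build` derives the `dev` alias from the *absence* of the feature,
        // so nothing is emitted here.
        super::cfg_alias("dev", !super::has_feature("custom-protocol"));
    }
}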
/// Attributes used on Windows.
#[allow(dead_code)]
#[derive(Debug, Default)]
pub struct WindowsAttributes {
window_icon_path: Option<PathBuf>,
/// The path to the sdk location.
///
/// For the GNU toolkit this has to be the path where MinGW put windres.exe and ar.exe.
/// This could be something like: "C:\Program Files\mingw-w64\x86_64-5.3.0-win32-seh-rt_v4-rev0\mingw64\bin"
///
/// For MSVC the Windows SDK has to be installed. It comes with the resource compiler rc.exe.
/// This should be set to the root directory of the Windows SDK, e.g., "C:\Program Files (x86)\Windows Kits\10" or,
/// if multiple 10 versions are installed, set it directly to the correct bin directory "C:\Program Files (x86)\Windows Kits\10\bin\10.0.14393.0\x64"
///
/// If it is left unset, it will look up a path in the registry, i.e. HKLM\SOFTWARE\Microsoft\Windows Kits\Installed Roots
sdk_dir: Option<PathBuf>,
/// A string containing an [application manifest] to be included with the application on Windows.
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// ```
///
/// [application manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
app_manifest: Option<String>,
}
impl WindowsAttributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
/// It must be in `ico` format. Defaults to `icons/icon.ico`.
#[must_use]
pub fn window_icon_path<P: AsRef<Path>>(mut self, window_icon_path: P) -> Self {
self
.window_icon_path
.replace(window_icon_path.as_ref().into());
self
}
/// Sets the sdk dir for windows. Currently only used on Windows. This must be a valid UTF-8
/// path. Defaults to whatever the `winres` crate determines is best.
#[must_use]
pub fn sdk_dir<P: AsRef<Path>>(mut self, sdk_dir: P) -> Self {
self.sdk_dir = Some(sdk_dir.as_ref().into());
self
}
/// Sets the Windows app [manifest].
///
/// # Example
///
/// The following manifest will brand the exe as requesting administrator privileges.
/// Thus, everytime it is executed, a Windows UAC dialog will appear.
///
/// Note that you can move the manifest contents to a separate file and use `include_str!("manifest.xml")`
/// instead of the inline string.
///
/// ```rust,no_run
/// let mut windows = tauri_build::WindowsAttributes::new();
/// windows = windows.app_manifest(r#"
/// <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
/// <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
/// <security>
/// <requestedPrivileges>
/// <requestedExecutionLevel level="requireAdministrator" uiAccess="false" />
/// </requestedPrivileges>
/// </security>
/// </trustInfo>
/// </assembly>
/// "#);
/// tauri_build::try_build(
/// tauri_build::Attributes::new().windows_attributes(windows)
/// ).expect("failed to run build script");
/// ```
///
/// Defaults to:
/// ```ignore
#[doc = include_str!("window-app-manifest.xml")]
/// [manifest]: https://learn.microsoft.com/en-us/windows/win32/sbscs/application-manifests
/// ```
#[must_use]
pub fn app_manifest<S: AsRef<str>>(mut self, manifest: S) -> Self {
self.app_manifest = Some(manifest.as_ref().to_string());
self
}
}
/// The attributes used on the build.
#[derive(Debug, Default)]
pub struct Attributes {
#[allow(dead_code)]
windows_attributes: WindowsAttributes,
}
impl Attributes {
/// Creates the default attribute set.
pub fn new() -> Self {
Self::default()
}
/// Sets the icon to use on the window. Currently only used on Windows.
#[must_use]
pub fn windows_attributes(mut self, windows_attributes: WindowsAttributes) -> Self {
self.windows_attributes = windows_attributes;
self
}
}
/// Run all build time helpers for your Tauri Application.
///
/// The current helpers include the following:
/// * Generates a Windows Resource file when targeting Windows.
///
/// # Platforms
///
/// [`build()`] should be called inside of `build.rs` regardless of the platform:
/// * New helpers may target more platforms in the future.
/// * Platform specific code is handled by the helpers automatically.
/// * A build script is required in order to activate some cargo environmental variables that are
/// used when generating code and embedding assets - so [`build()`] may as well be called.
///
/// In short, this is saying don't put the call to [`build()`] behind a `#[cfg(windows)]`.
///
/// # Panics
///
/// If any of the build time helpers fail, they will [`std::panic!`] with the related error message.
/// This is typically desirable when running inside a build script; see [`try_build`] for no panics.
pub fn build() {
if let Err(error) = try_build(Attributes::default()) {
let error = format!("{error:#}");
println!("{error}");
if error.starts_with("unknown field") {
print!("found an unknown configuration field. This usually means that you are using a CLI version that is newer than `tauri-build` and is incompatible. ");
println!(
"Please try updating the Rust crates by running `cargo update` in the Tauri app folder."
);
}
std::process::exit(1);
}
}
/// Non-panicking [`build()`].
#[allow(unused_variables)]
pub fn try_build(attributes: Attributes) -> Result<()> {
use anyhow::anyhow;
println!("cargo:rerun-if-env-changed=TAURI_CONFIG");
println!("cargo:rerun-if-changed=tauri.conf.json");
#[cfg(feature = "config-json5")]
println!("cargo:rerun-if-changed=tauri.conf.json5");
#[cfg(feature = "config-toml")]
println!("cargo:rerun-if-changed=Tauri.toml");
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap();
let mobile = target_os == "ios" || target_os == "android";
cfg_alias("desktop",!mobile);
cfg_alias("mobile", mobile);
let mut config = serde_json::from_value(tauri_utils::config::parse::read_from(
std::env::current_dir().unwrap(),
)?)?;
if let Ok(env) = std::env::var("TAURI_CONFIG") {
let merge_config: serde_json::Value = serde_json::from_str(&env)?;
json_patch::merge(&mut config, &merge_config);
}
let config: Config = serde_json::from_value(config)?;
cfg_alias("dev",!has_feature("custom-protocol"));
let ws_path = get_workspace_dir()?;
let mut manifest =
Manifest::<cargo_toml::Value>::from_slice_with_metadata(&std::fs::read("Cargo.toml")?)?;
if let Ok(ws_manifest) = Manifest::from_path(ws_path.join("Cargo.toml")) {
Manifest::complete_from_path_and_workspace(
&mut manifest,
Path::new("Cargo.toml"),
Some((&ws_manifest, ws_path.as_path())),
)?;
} else {
Manifest::complete_from_path(&mut manifest, Path::new("Cargo.toml"))?;
}
if let Some(tauri_build) = manifest.build_dependencies.remove("tauri-build") {
let error_message = check_features(&config, tauri_build, true);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri-build` dependency features on the `Cargo.toml` file do not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
if let Some(tauri) = manifest.dependencies.remove("tauri") {
let error_message = check_features(&config, tauri, false);
if !error_message.is_empty() {
return Err(anyhow!("
The `tauri` dependency features on the `Cargo.toml` file do not match the allowlist defined under `tauri.conf.json`.
Please run `tauri dev` or `tauri build` or {}.
", error_message));
}
}
let target_triple = std::env::var("TARGET").unwrap();
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
// TODO: far from ideal, but there's no other way to get the target dir, see <https://github.com/rust-lang/cargo/issues/5457>
let target_dir = out_dir
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
if let Some(paths) = &config.tauri.bundle.external_bin {
copy_binaries(
ResourcePaths::new(external_binaries(paths, &target_triple).as_slice(), true),
&target_triple,
target_dir,
manifest.package.as_ref().map(|p| &p.name),
)?;
}
#[allow(unused_mut, clippy::redundant_clone)]
let mut resources = config.tauri.bundle.resources.clone().unwrap_or_default();
if target_triple.contains("windows") {
if let Some(fixed_webview2_runtime_path) =
&config.tauri.bundle.windows.webview_fixed_runtime_path
{
resources.push(fixed_webview2_runtime_path.display().to_string());
}
}
copy_resources(ResourcePaths::new(resources.as_slice(), true), target_dir)?;
if target_triple.contains("darwin") {
if let Some(version) = &config.tauri.bundle.macos.minimum_system_version {
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET={}", version);
}
}
if target_triple.contains("windows") {
use anyhow::Context;
use semver::Version;
use tauri_winres::{VersionInfo, WindowsResource};
fn find_icon<F: Fn(&&String) -> bool>(config: &Config, predicate: F, default: &str) -> PathBuf {
let icon_path = config | .icon
.iter()
.find(|i| predicate(i))
.cloned()
.unwrap_or_else(|| default.to_string());
icon_path.into()
}
let window_icon_path = attributes
.windows_attributes
.window_icon_path
.unwrap_or_else(|| find_icon(&config, |i| i.ends_with(".ico"), "icons/icon.ico"));
if window_icon_path.exists() {
let mut res = WindowsResource::new();
if let Some(manifest) = attributes.windows_attributes.app_manifest {
res.set_manifest(&manifest);
} else {
res.set_manifest(include_str!("window-app-manifest.xml"));
}
if let Some(sdk_dir) = &attributes.windows_attributes.sdk_dir {
if let Some(sdk_dir_str) = sdk_dir.to_str() {
res.set_toolkit_path(sdk_dir_str);
} else {
return Err(anyhow!(
"sdk_dir path is not valid; only UTF-8 characters are allowed"
));
}
}
if let Some(version) = &config.package.version {
if let Ok(v) = Version::parse(version) {
let version = v.major << 48 | v.minor << 32 | v.patch << 16;
res.set_version_info(VersionInfo::FILEVERSION, version);
res.set_version_info(VersionInfo::PRODUCTVERSION, version);
}
res.set("FileVersion", version);
res.set("ProductVersion", version);
}
if let Some(product_name) = &config.package.product_name {
res.set("ProductName", product_name);
res.set("FileDescription", product_name);
}
res.set_icon_with_id(&window_icon_path.display().to_string(), "32512");
res.compile().with_context(|| {
format!(
"failed to compile `{}` into a Windows Resource file during tauri-build",
window_icon_path.display()
)
})?;
} else {
return Err(anyhow!(format!(
"`{}` not found; required for generating a Windows Resource file during tauri-build",
window_icon_path.display()
)));
}
let target_env = std::env::var("CARGO_CFG_TARGET_ENV").unwrap();
match target_env.as_str() {
"gnu" => {
let target_arch = match std::env::var("CARGO_CFG_TARGET_ARCH").unwrap().as_str() {
"x86_64" => Some("x64"),
"x86" => Some("x86"),
"aarch64" => Some("arm64"),
_ => None,
};
if let Some(target_arch) = target_arch {
for entry in std::fs::read_dir(target_dir.join("build"))? {
let path = entry?.path();
let webview2_loader_path = path
.join("out")
.join(target_arch)
.join("WebView2Loader.dll");
if path.to_string_lossy().contains("webview2-com-sys") && webview2_loader_path.exists()
{
std::fs::copy(webview2_loader_path, target_dir.join("WebView2Loader.dll"))?;
break;
}
}
}
}
"msvc" => {
if std::env::var("STATIC_VCRUNTIME").map_or(false, |v| v == "true") {
static_vcruntime::build();
}
}
_ => (),
}
}
Ok(())
}
#[derive(Debug, Default, PartialEq, Eq)]
struct Diff {
remove: Vec<String>,
add: Vec<String>,
}
fn features_diff(current: &[String], expected: &[String]) -> Diff {
let mut remove = Vec::new();
let mut add = Vec::new();
for feature in current {
if !expected.contains(feature) {
remove.push(feature.clone());
}
}
for feature in expected {
if !current.contains(feature) {
add.push(feature.clone());
}
}
Diff { remove, add }
}
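// Hedged sketch mirroring the unit tests at the bottom of this file:
// `features_diff` reports which features must be removed from and added to the
// current set to match the expected set; order within each list follows the
// input order.
#[cfg(test)]
mod features_diff_example {
    #[test]
    fn reports_remove_and_add() {
        let current = vec!["isolation".to_string()];
        let expected = vec!["custom-protocol".to_string()];
        let diff = super::features_diff(&current, &expected);
        assert_eq!(diff.remove, vec!["isolation".to_string()]);
        assert_eq!(diff.add, vec!["custom-protocol".to_string()]);
    }
}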
fn check_features(config: &Config, dependency: Dependency, is_tauri_build: bool) -> String {
use tauri_utils::config::{PatternKind, TauriConfig};
let features = match dependency {
Dependency::Simple(_) => Vec::new(),
Dependency::Detailed(dep) => dep.features,
Dependency::Inherited(dep) => dep.features,
};
let all_cli_managed_features = if is_tauri_build {
vec!["isolation"]
} else {
TauriConfig::all_features()
};
let expected = if is_tauri_build {
match config.tauri.pattern {
PatternKind::Isolation { .. } => vec!["isolation".to_string()],
_ => vec![],
}
} else {
config
.tauri
.features()
.into_iter()
.map(|f| f.to_string())
.collect::<Vec<String>>()
};
let diff = features_diff(
&features
.into_iter()
.filter(|f| all_cli_managed_features.contains(&f.as_str()))
.collect::<Vec<String>>(),
&expected,
);
let mut error_message = String::new();
if !diff.remove.is_empty() {
error_message.push_str("remove the `");
error_message.push_str(&diff.remove.join(", "));
error_message.push_str(if diff.remove.len() == 1 {
"` feature"
} else {
"` features"
});
if !diff.add.is_empty() {
error_message.push_str(" and ");
}
}
if !diff.add.is_empty() {
error_message.push_str("add the `");
error_message.push_str(&diff.add.join(", "));
error_message.push_str(if diff.add.len() == 1 {
"` feature"
} else {
"` features"
});
}
error_message
}
#[derive(serde::Deserialize)]
struct CargoMetadata {
workspace_root: PathBuf,
}
fn get_workspace_dir() -> Result<PathBuf> {
let output = std::process::Command::new("cargo")
.args(["metadata", "--no-deps", "--format-version", "1"])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"cargo metadata command exited with a non zero exit code: {}",
String::from_utf8(output.stderr)?
));
}
Ok(serde_json::from_slice::<CargoMetadata>(&output.stdout)?.workspace_root)
}
#[cfg(test)]
mod tests {
use super::Diff;
#[test]
fn array_diff() {
for (current, expected, result) in [
(vec![], vec![], Default::default()),
(
vec!["a".into()],
vec![],
Diff {
remove: vec!["a".into()],
add: vec![],
},
),
(vec!["a".into()], vec!["a".into()], Default::default()),
(
vec!["a".into(), "b".into()],
vec!["a".into()],
Diff {
remove: vec!["b".into()],
add: vec![],
},
),
(
vec!["a".into(), "b".into()],
vec!["a".into(), "c".into()],
Diff {
remove: vec!["b".into()],
add: vec!["c".into()],
},
),
] {
assert_eq!(super::features_diff(&current, &expected), result);
}
}
} | .tauri
.bundle | random_line_split |
main.rs | use clap::{App, AppSettings, Arg, SubCommand};
use default_boxed::DefaultBoxed;
#[derive(DefaultBoxed)]
struct Outer<'a, 'b> {
inner: HeapApp<'a, 'b>,
}
struct HeapApp<'a, 'b> {
app: App<'a, 'b>,
}
impl<'a, 'b> Default for HeapApp<'a, 'b> {
fn default() -> Self {
let mut app = App::new("serviceusage1_beta1")
.setting(clap::AppSettings::ColoredHelp)
.author("Sebastian Thiel <[email protected]>")
.version("0.1.0-20210317")
.about("Enables services that service consumers want to use on Google Cloud Platform, lists the available or enabled services, or disables services that service consumers no longer use.")
.after_help("All documentation details can be found at <TODO figure out URL>")
.arg(Arg::with_name("scope")
.long("scope")
.help("Specify the authentication method should be executed in. Each scope requires the user to grant this application permission to use it. If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation." )
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Provide more output to aid with debugging")
.multiple(false)
.takes_value(false));
let mut operations0 = SubCommand::with_name("operations")
.setting(AppSettings::ColoredHelp)
.about("methods: get and list");
{
let mcmd = SubCommand::with_name("get").about("Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.");
operations0 = operations0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Lists operations that match the specified filter in the request. If the server doesn\'t support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `\"/v1/{name=users/*}/operations\"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.");
operations0 = operations0.subcommand(mcmd);
}
let mut services0 = SubCommand::with_name("services")
.setting(AppSettings::ColoredHelp)
.about(
"methods: batch_enable, disable, enable, generate_service_identity, get and list",
);
{
let mcmd = SubCommand::with_name("batch_enable").about("Enable multiple services on a project. The operation is atomic: if enabling any service fails, then the entire batch fails, and no state changes occur. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("disable").about("Disable a service so that it can no longer be used with a project. This prevents unintended usage that may cause unexpected billing charges or security leaks. It is not valid to call the disable method on a service that is not currently enabled. Callers will receive a `FAILED_PRECONDITION` status if the target service is not currently enabled. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("enable")
.about("Enable a service so that it can be used with a project. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("generate_service_identity")
.about("Generate service identity for service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("get")
.about("Returns the service configuration and enabled state for a given service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("List all services available to the specified project, and the current state of those services with respect to the project. The list includes all public services, all services for which the calling user has the `servicemanagement.services.bind` permission, and all services that have already been enabled on the project. The list can be filtered to only include services in a specific state, for example to only include services enabled on the project.");
services0 = services0.subcommand(mcmd);
}
let mut consumer_quota_metrics1 = SubCommand::with_name("consumer_quota_metrics")
.setting(AppSettings::ColoredHelp)
.about("methods: get, import_admin_overrides, import_consumer_overrides and list");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota metric");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_admin_overrides").about("Create or update multiple admin overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_consumer_overrides").about("Create or update multiple consumer overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Retrieves a summary of all quota information visible to the service consumer, organized by service metric. Each metric includes information about all of its defined limits. Each limit includes the limit configuration (quota unit, preciseness, default value), the current effective limit value, and all of the overrides applied to the limit.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
let mut limits2 = SubCommand::with_name("limits")
.setting(AppSettings::ColoredHelp)
.about("methods: get");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota limit.");
limits2 = limits2.subcommand(mcmd);
}
let mut admin_overrides3 = SubCommand::with_name("admin_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch");
{
let mcmd = SubCommand::with_name("create").about("Creates an admin override. An admin override is applied by an administrator of a parent folder or parent organization of the consumer receiving the override. An admin override is intended to limit the amount of quota the consumer can use out of the total quota pool allocated to all children of the folder or organization.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all admin overrides on this limit.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
let mut consumer_overrides3 = SubCommand::with_name("consumer_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch");
{
let mcmd = SubCommand::with_name("create").about("Creates a consumer override. A consumer override is applied to the consumer on its own authority to limit its own quota usage. Consumer overrides cannot be used to grant more quota than would be allowed by admin overrides, producer overrides, or the default limit of the service.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all consumer overrides on this limit.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
limits2 = limits2.subcommand(consumer_overrides3);
limits2 = limits2.subcommand(admin_overrides3);
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(limits2);
services0 = services0.subcommand(consumer_quota_metrics1);
app = app.subcommand(services0);
app = app.subcommand(operations0);
Self { app }
}
}
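// Hedged sketch, not part of the generated CLI: with clap 2.x, the nested
// subcommands registered above can be resolved level by level through
// `ArgMatches::subcommand`. The argument vector below is illustrative.
#[cfg(test)]
mod dispatch_example {
    use super::*;
    #[test]
    fn resolves_nested_subcommand() {
        let outer = Outer::default_boxed();
        let matches = outer
            .inner
            .app
            .get_matches_from(vec!["serviceusage1_beta1", "services", "enable"]);
        let (name, services) = matches.subcommand();
        assert_eq!(name, "services");
        assert_eq!(services.unwrap().subcommand_name(), Some("enable"));
    }
}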
use google_serviceusage1_beta1 as api;
fn main() | {
// TODO: set homedir afterwards, once the address is unmovable, or use Pin for the very first time
// to allow a self-referential structure :D!
let _home_dir = dirs::config_dir()
.expect("configuration directory can be obtained")
.join("google-service-cli");
let outer = Outer::default_boxed();
let app = outer.inner.app;
let _matches = app.get_matches();
} | identifier_body |
|
main.rs | use clap::{App, AppSettings, Arg, SubCommand};
use default_boxed::DefaultBoxed;
#[derive(DefaultBoxed)]
struct Outer<'a, 'b> {
inner: HeapApp<'a, 'b>,
}
struct HeapApp<'a, 'b> {
app: App<'a, 'b>,
}
impl<'a, 'b> Default for HeapApp<'a, 'b> {
fn default() -> Self {
let mut app = App::new("serviceusage1_beta1")
.setting(clap::AppSettings::ColoredHelp)
.author("Sebastian Thiel <[email protected]>")
.version("0.1.0-20210317")
.about("Enables services that service consumers want to use on Google Cloud Platform, lists the available or enabled services, or disables services that service consumers no longer use.")
.after_help("All documentation details can be found at <TODO figure out URL>")
.arg(Arg::with_name("scope")
.long("scope")
.help("Specify the authentication method should be executed in. Each scope requires the user to grant this application permission to use it. If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation." )
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Provide more output to aid with debugging")
.multiple(false)
.takes_value(false));
let mut operations0 = SubCommand::with_name("operations")
.setting(AppSettings::ColoredHelp)
.about("methods: get and list");
{
let mcmd = SubCommand::with_name("get").about("Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.");
operations0 = operations0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Lists operations that match the specified filter in the request. If the server doesn\'t support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `\"/v1/{name=users/*}/operations\"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.");
operations0 = operations0.subcommand(mcmd);
}
let mut services0 = SubCommand::with_name("services")
.setting(AppSettings::ColoredHelp)
.about(
"methods: batch_enable, disable, enable, generate_service_identity, get and list",
);
{
let mcmd = SubCommand::with_name("batch_enable").about("Enable multiple services on a project. The operation is atomic: if enabling any service fails, then the entire batch fails, and no state changes occur. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("disable").about("Disable a service so that it can no longer be used with a project. This prevents unintended usage that may cause unexpected billing charges or security leaks. It is not valid to call the disable method on a service that is not currently enabled. Callers will receive a `FAILED_PRECONDITION` status if the target service is not currently enabled. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("enable")
.about("Enable a service so that it can be used with a project. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("generate_service_identity")
.about("Generate service identity for service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("get")
.about("Returns the service configuration and enabled state for a given service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("List all services available to the specified project, and the current state of those services with respect to the project. The list includes all public services, all services for which the calling user has the `servicemanagement.services.bind` permission, and all services that have already been enabled on the project. The list can be filtered to only include services in a specific state, for example to only include services enabled on the project.");
services0 = services0.subcommand(mcmd);
}
let mut consumer_quota_metrics1 = SubCommand::with_name("consumer_quota_metrics")
.setting(AppSettings::ColoredHelp)
.about("methods: get, import_admin_overrides, import_consumer_overrides and list");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota metric");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_admin_overrides").about("Create or update multiple admin overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_consumer_overrides").about("Create or update multiple consumer overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Retrieves a summary of all quota information visible to the service consumer, organized by service metric. Each metric includes information about all of its defined limits. Each limit includes the limit configuration (quota unit, preciseness, default value), the current effective limit value, and all of the overrides applied to the limit.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
let mut limits2 = SubCommand::with_name("limits")
.setting(AppSettings::ColoredHelp)
.about("methods: get");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota limit.");
limits2 = limits2.subcommand(mcmd);
}
let mut admin_overrides3 = SubCommand::with_name("admin_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch");
{
let mcmd = SubCommand::with_name("create").about("Creates an admin override. An admin override is applied by an administrator of a parent folder or parent organization of the consumer receiving the override. An admin override is intended to limit the amount of quota the consumer can use out of the total quota pool allocated to all children of the folder or organization.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all admin overrides on this limit.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
} | {
let mcmd = SubCommand::with_name("create").about("Creates a consumer override. A consumer override is applied to the consumer on its own authority to limit its own quota usage. Consumer overrides cannot be used to grant more quota than would be allowed by admin overrides, producer overrides, or the default limit of the service.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all consumer overrides on this limit.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
limits2 = limits2.subcommand(consumer_overrides3);
limits2 = limits2.subcommand(admin_overrides3);
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(limits2);
services0 = services0.subcommand(consumer_quota_metrics1);
app = app.subcommand(services0);
app = app.subcommand(operations0);
Self { app }
}
}
use google_serviceusage1_beta1 as api;
fn main() {
// TODO: set homedir afterwards, once the address is unmovable, or use Pin for the very first time
// to allow a self-referential structure :D!
let _home_dir = dirs::config_dir()
.expect("configuration directory can be obtained")
.join("google-service-cli");
let outer = Outer::default_boxed();
let app = outer.inner.app;
let _matches = app.get_matches();
} | let mut consumer_overrides3 = SubCommand::with_name("consumer_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch"); | random_line_split |
main.rs | use clap::{App, AppSettings, Arg, SubCommand};
use default_boxed::DefaultBoxed;
#[derive(DefaultBoxed)]
struct Outer<'a, 'b> {
inner: HeapApp<'a, 'b>,
}
struct | <'a, 'b> {
app: App<'a, 'b>,
}
impl<'a, 'b> Default for HeapApp<'a, 'b> {
fn default() -> Self {
let mut app = App::new("serviceusage1_beta1")
.setting(clap::AppSettings::ColoredHelp)
.author("Sebastian Thiel <[email protected]>")
.version("0.1.0-20210317")
.about("Enables services that service consumers want to use on Google Cloud Platform, lists the available or enabled services, or disables services that service consumers no longer use.")
.after_help("All documentation details can be found at <TODO figure out URL>")
.arg(Arg::with_name("scope")
.long("scope")
.help("Specify the authentication method should be executed in. Each scope requires the user to grant this application permission to use it. If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation." )
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Provide more output to aid with debugging")
.multiple(false)
.takes_value(false));
let mut operations0 = SubCommand::with_name("operations")
.setting(AppSettings::ColoredHelp)
.about("methods: get and list");
{
let mcmd = SubCommand::with_name("get").about("Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.");
operations0 = operations0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Lists operations that match the specified filter in the request. If the server doesn\'t support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `\"/v1/{name=users/*}/operations\"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.");
operations0 = operations0.subcommand(mcmd);
}
let mut services0 = SubCommand::with_name("services")
.setting(AppSettings::ColoredHelp)
.about(
"methods: batch_enable, disable, enable, generate_service_identity, get and list",
);
{
let mcmd = SubCommand::with_name("batch_enable").about("Enable multiple services on a project. The operation is atomic: if enabling any service fails, then the entire batch fails, and no state changes occur. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("disable").about("Disable a service so that it can no longer be used with a project. This prevents unintended usage that may cause unexpected billing charges or security leaks. It is not valid to call the disable method on a service that is not currently enabled. Callers will receive a `FAILED_PRECONDITION` status if the target service is not currently enabled. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("enable")
.about("Enable a service so that it can be used with a project. Operation");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("generate_service_identity")
.about("Generate service identity for service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("get")
.about("Returns the service configuration and enabled state for a given service.");
services0 = services0.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("List all services available to the specified project, and the current state of those services with respect to the project. The list includes all public services, all services for which the calling user has the `servicemanagement.services.bind` permission, and all services that have already been enabled on the project. The list can be filtered to only include services in a specific state, for example to only include services enabled on the project.");
services0 = services0.subcommand(mcmd);
}
let mut consumer_quota_metrics1 = SubCommand::with_name("consumer_quota_metrics")
.setting(AppSettings::ColoredHelp)
.about("methods: get, import_admin_overrides, import_consumer_overrides and list");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota metric");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_admin_overrides").about("Create or update multiple admin overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("import_consumer_overrides").about("Create or update multiple consumer overrides atomically, all on the same consumer, but on many different metrics or limits. The name field in the quota override message should not be set.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("list").about("Retrieves a summary of all quota information visible to the service consumer, organized by service metric. Each metric includes information about all of its defined limits. Each limit includes the limit configuration (quota unit, preciseness, default value), the current effective limit value, and all of the overrides applied to the limit.");
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(mcmd);
}
let mut limits2 = SubCommand::with_name("limits")
.setting(AppSettings::ColoredHelp)
.about("methods: get");
{
let mcmd = SubCommand::with_name("get")
.about("Retrieves a summary of quota information for a specific quota limit.");
limits2 = limits2.subcommand(mcmd);
}
let mut admin_overrides3 = SubCommand::with_name("admin_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch");
{
let mcmd = SubCommand::with_name("create").about("Creates an admin override. An admin override is applied by an administrator of a parent folder or parent organization of the consumer receiving the override. An admin override is intended to limit the amount of quota the consumer can use out of the total quota pool allocated to all children of the folder or organization.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all admin overrides on this limit.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates an admin override.");
admin_overrides3 = admin_overrides3.subcommand(mcmd);
}
let mut consumer_overrides3 = SubCommand::with_name("consumer_overrides")
.setting(AppSettings::ColoredHelp)
.about("methods: create, delete, list and patch");
{
let mcmd = SubCommand::with_name("create").about("Creates a consumer override. A consumer override is applied to the consumer on its own authority to limit its own quota usage. Consumer overrides cannot be used to grant more quota than would be allowed by admin overrides, producer overrides, or the default limit of the service.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("delete").about("Deletes a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd =
SubCommand::with_name("list").about("Lists all consumer overrides on this limit.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
{
let mcmd = SubCommand::with_name("patch").about("Updates a consumer override.");
consumer_overrides3 = consumer_overrides3.subcommand(mcmd);
}
limits2 = limits2.subcommand(consumer_overrides3);
limits2 = limits2.subcommand(admin_overrides3);
consumer_quota_metrics1 = consumer_quota_metrics1.subcommand(limits2);
services0 = services0.subcommand(consumer_quota_metrics1);
app = app.subcommand(services0);
app = app.subcommand(operations0);
Self { app }
}
}
use google_serviceusage1_beta1 as api;
fn main() {
// TODO: set homedir afterwards, once the address is unmovable, or use Pin for the very first time
// to allow a self-referential structure :D!
let _home_dir = dirs::config_dir()
.expect("configuration directory can be obtained")
.join("google-service-cli");
let outer = Outer::default_boxed();
let app = outer.inner.app;
let _matches = app.get_matches();
}
| HeapApp | identifier_name |
fmt.rs | use std::{
borrow::Cow,
fmt::{self, Write as _},
io,
time::Duration,
};
use termcolor::{ColorSpec, WriteColor};
use unicode_width::UnicodeWidthChar;
use crate::{markup, Markup, MarkupElement};
/// A stack-allocated linked-list of [MarkupElement] slices
pub enum MarkupElements<'a> {
Root,
Node(&'a Self, &'a [MarkupElement]),
}
impl<'a> MarkupElements<'a> {
/// Iterates on all the element slices depth-first
pub fn for_each(&self, func: &mut impl FnMut(&'a [MarkupElement])) {
if let Self::Node(parent, elem) = self {
parent.for_each(func);
func(elem);
}
}
}
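// Hedged illustration, not part of the original module: building a two-level
// element stack entirely on the stack and walking it depth-first. The unit
// variants `MarkupElement::Emphasis` and `MarkupElement::Dim` are assumed to
// exist as constructed by the `markup!` macro elsewhere in this crate.
#[cfg(test)]
mod markup_elements_example {
    use super::MarkupElements;
    use crate::MarkupElement;
    #[test]
    fn visits_parent_slices_first() {
        let root = MarkupElements::Root;
        let outer = [MarkupElement::Emphasis];
        let level1 = MarkupElements::Node(&root, &outer);
        let inner = [MarkupElement::Dim, MarkupElement::Dim];
        let level2 = MarkupElements::Node(&level1, &inner);
        let mut slice_lengths = Vec::new();
        level2.for_each(&mut |slice| slice_lengths.push(slice.len()));
        // The outer (parent) slice is visited before the inner one.
        assert_eq!(slice_lengths, vec![1, 2]);
    }
}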
pub trait Write {
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>;
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>;
}
/// Applies the current format in `state` to `writer`, calls `func` to
/// print a piece of text, then reset the printing format
fn with_format<W>(
writer: &mut W,
state: &MarkupElements,
func: impl FnOnce(&mut W) -> io::Result<()>,
) -> io::Result<()>
where
W: WriteColor,
{
let mut color = ColorSpec::new();
state.for_each(&mut |elements| {
for element in elements {
element.update_color(&mut color);
}
});
if let Err(err) = writer.set_color(&color) {
writer.reset()?;
return Err(err);
}
let result = func(writer);
writer.reset()?;
result
}
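// Hedged sketch, not part of the original module: `with_format` always resets
// the writer's style after running the closure. With a `termcolor` buffer that
// strips styling, only the payload remains, which makes the contract easy to
// observe in isolation.
#[cfg(test)]
mod with_format_example {
    use super::{with_format, MarkupElements};
    use std::io::Write as _;
    #[test]
    fn writes_payload_and_resets() {
        let mut buffer = termcolor::Buffer::no_color();
        with_format(&mut buffer, &MarkupElements::Root, |w| w.write_all(b"hi")).unwrap();
        assert_eq!(buffer.as_slice(), b"hi");
    }
}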
/// Adapter struct implementing [Write] over types implementing [WriteColor]
pub struct Termcolor<W>(pub W);
impl<W> Write for Termcolor<W>
where
W: WriteColor,
{
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_str(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
// SanitizeAdapter can only fail if the underlying
// writer returns an error
unreachable!()
}
}
}
})
}
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_fmt(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"a Display formatter returned an error",
))
}
}
}
})
}
}
/// Adapter [fmt::Write] calls to [io::Write] with sanitization,
/// implemented as an internal struct to avoid exposing [fmt::Write] on
/// [Termcolor]
struct SanitizeAdapter<W> {
writer: W,
error: io::Result<()>,
}
impl<W: io::Write> fmt::Write for SanitizeAdapter<W> {
fn write_str(&mut self, content: &str) -> fmt::Result {
let mut buffer = [0; 4];
for item in content.chars() {
// Replace non-whitespace, zero-width characters with the Unicode replacement character
let is_whitespace = item.is_whitespace();
let is_zero_width = UnicodeWidthChar::width(item).map_or(true, |width| width == 0);
let item = if !is_whitespace && is_zero_width {
char::REPLACEMENT_CHARACTER
} else {
item
};
item.encode_utf8(&mut buffer);
if let Err(err) = self.writer.write_all(&buffer[..item.len_utf8()]) {
self.error = Err(err);
return Err(fmt::Error);
}
}
Ok(())
}
}
/// The [Formatter] is the `rome_console` equivalent to [std::fmt::Formatter]:
/// it's never constructed directly by consumers, and can only be used through
/// the mutable reference passed to implementations of the [Display] trait.
/// It manages the state of the markup to print, and implementations of
/// [Display] can call into its methods to append content into the current
/// printing session
pub struct Formatter<'fmt> {
/// Stack of markup elements currently applied to the text being printed
state: MarkupElements<'fmt>,
/// Inner IO writer this [Formatter] will print text into
writer: &'fmt mut dyn Write,
}
impl<'fmt> Formatter<'fmt> {
/// Create a new instance of the [Formatter] using the provided `writer` for printing
pub fn new(writer: &'fmt mut dyn Write) -> Self {
Self {
state: MarkupElements::Root,
writer,
}
}
/// Return a new instance of the [Formatter] with `elements` appended to its element stack
fn with_elements<'b>(&'b mut self, elements: &'b [MarkupElement]) -> Formatter<'b> {
Formatter {
state: MarkupElements::Node(&self.state, elements),
writer: self.writer,
}
}
/// Write a piece of markup into this formatter
pub fn write_markup(&mut self, markup: Markup) -> io::Result<()> {
for node in markup.0 {
let mut fmt = self.with_elements(node.elements);
node.content.fmt(&mut fmt)?;
}
Ok(())
}
/// Write a slice of text into this formatter
pub fn write_str(&mut self, content: &str) -> io::Result<()> {
self.writer.write_str(&self.state, content)
}
/// Write formatted text into this formatter
pub fn write_fmt(&mut self, content: fmt::Arguments) -> io::Result<()> {
self.writer.write_fmt(&self.state, content)
}
}
/// Formatting trait for types to be displayed as markup, the `rome_console`
/// equivalent to [std::fmt::Display]
///
/// # Example
/// Implementing `Display` on a custom struct
/// ```
/// use std::io;
/// use rome_console::{fmt::{Display, Formatter}, markup};
///
/// struct Warning(String);
///
/// impl Display for Warning {
/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
/// fmt.write_markup(markup! {
/// <Warn>{self.0}</Warn>
/// })
/// }
/// }
///
/// let warning = Warning(String::from("content"));
/// markup! {
/// <Emphasis>{warning}</Emphasis>
/// };
/// ```
pub trait Display {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>;
}
// Blanket implementations of Display for reference types
impl<'a, T> Display for &'a T
where
T: Display + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
impl<'a, T> Display for Cow<'a, T>
where
T: Display + ToOwned + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
// Simple implementations of Display calling through to write_str for types
// that implement Deref<str>
impl Display for str {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
impl Display for String {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
// Implement Display for Markup and Rust format Arguments
impl<'a> Display for Markup<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_markup(*self)
}
}
impl<'a> Display for std::fmt::Arguments<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_fmt(*self)
}
}
/// Implement [Display] for types that implement [std::fmt::Display] by calling
/// through to [Formatter::write_fmt]
macro_rules! impl_std_display {
($ty:ty) => {
impl Display for $ty {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
write!(fmt, "{self}")
}
}
};
}
impl_std_display!(char);
impl_std_display!(i8);
impl_std_display!(i16);
impl_std_display!(i32);
impl_std_display!(i64);
impl_std_display!(i128);
impl_std_display!(isize);
impl_std_display!(u8);
impl_std_display!(u16);
impl_std_display!(u32);
impl_std_display!(u64);
impl_std_display!(u128);
impl_std_display!(usize);
impl Display for Duration {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
use crate as rome_console;
let secs = self.as_secs();
if secs > 1 {
return fmt.write_markup(markup! {
{secs}<Dim>"s"</Dim>
});
}
let millis = self.as_millis();
if millis > 1 {
return fmt.write_markup(markup! {
{millis}<Dim>"ms"</Dim>
});
}
let micros = self.as_micros();
if micros > 1 {
return fmt.write_markup(markup! {
{micros}<Dim>"µs"</Dim>
});
}
let nanos = self.as_nanos();
fmt.write_markup(markup! {
{nanos}<Dim>"ns"</Dim>
})
}
}
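// Hedged usage sketch, not part of the original module: driving the `Duration`
// implementation above through a `Termcolor` writer backed by an in-memory,
// style-stripping buffer. Two seconds selects the coarsest unit branch, so the
// rendered text is exactly "2s".
#[cfg(test)]
mod duration_display_example {
    use super::{Display, Formatter, Termcolor};
    use std::time::Duration;
    #[test]
    fn formats_whole_seconds() {
        let mut writer = Termcolor(termcolor::Buffer::no_color());
        let mut fmt = Formatter::new(&mut writer);
        Duration::from_secs(2).fmt(&mut fmt).unwrap();
        assert_eq!(std::str::from_utf8(writer.0.as_slice()).unwrap(), "2s");
    }
}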
#[cfg(test)]
mod tests {
use std::{fmt::Write, str::from_utf8};
use super::SanitizeAdapter;
#[test]
fn test_sanitize() {
// Sanitization should leave whitespace control characters (space,
// tabs, newline,...) and non-ASCII unicode characters as-is but
// redact zero-width characters (RTL override, null character, bell,
// zero-width space,...) | {
let mut adapter = SanitizeAdapter {
writer: &mut buffer,
error: Ok(()),
};
adapter.write_str(INPUT).unwrap();
adapter.error.unwrap();
}
assert_eq!(from_utf8(&buffer).unwrap(), OUTPUT);
}
} | const INPUT: &str = "t\tes t\r\n\u{202D}t\0es\x07t\u{202E}\nt\u{200B}es🐛t";
const OUTPUT: &str = "t\tes t\r\n\u{FFFD}t\u{FFFD}es\u{FFFD}t\u{FFFD}\nt\u{FFFD}es🐛t";
let mut buffer = Vec::new();
| random_line_split |
fmt.rs | use std::{
borrow::Cow,
fmt::{self, Write as _},
io,
time::Duration,
};
use termcolor::{ColorSpec, WriteColor};
use unicode_width::UnicodeWidthChar;
use crate::{markup, Markup, MarkupElement};
/// A stack-allocated linked-list of [MarkupElement] slices
pub enum MarkupElements<'a> {
Root,
Node(&'a Self, &'a [MarkupElement]),
}
impl<'a> MarkupElements<'a> {
/// Iterates on all the element slices depth-first
pub fn for_each(&self, func: &mut impl FnMut(&'a [MarkupElement])) {
if let Self::Node(parent, elem) = self {
parent.for_each(func);
func(elem);
}
}
}
pub trait Write {
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>;
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>;
}
/// Applies the current format in `state` to `writer`, calls `func` to
/// print a piece of text, then reset the printing format
fn with_format<W>(
writer: &mut W,
state: &MarkupElements,
func: impl FnOnce(&mut W) -> io::Result<()>,
) -> io::Result<()>
where
W: WriteColor,
{
let mut color = ColorSpec::new();
state.for_each(&mut |elements| {
for element in elements {
element.update_color(&mut color);
}
});
if let Err(err) = writer.set_color(&color) {
writer.reset()?;
return Err(err);
}
let result = func(writer);
writer.reset()?;
result
}
/// Adapter struct implementing [Write] over types implementing [WriteColor]
pub struct Termcolor<W>(pub W);
impl<W> Write for Termcolor<W>
where
W: WriteColor,
{
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_str(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
// SanitizeAdapter can only fail if the underlying
// writer returns an error
unreachable!()
}
}
}
})
}
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_fmt(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"a Display formatter returned an error",
))
}
}
}
})
}
}
/// Adapts [fmt::Write] calls to [io::Write] with sanitization,
/// implemented as an internal struct to avoid exposing [fmt::Write] on
/// [Termcolor]
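///
/// A minimal sketch mirroring the unit test at the bottom of this file
/// (the NUL byte is zero-width and not whitespace, so it is replaced):
///
/// ```
/// # use std::fmt::Write as _;
/// let mut buffer = Vec::new();
/// let mut adapter = SanitizeAdapter { writer: &mut buffer, error: Ok(()) };
/// adapter.write_str("a\u{0}b").unwrap();
/// assert_eq!(buffer, "a\u{FFFD}b".as_bytes());
/// ```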
struct SanitizeAdapter<W> {
writer: W,
error: io::Result<()>,
}
impl<W: io::Write> fmt::Write for SanitizeAdapter<W> {
fn write_str(&mut self, content: &str) -> fmt::Result {
let mut buffer = [0; 4];
for item in content.chars() {
// Replace non-whitespace, zero-width characters with the Unicode replacement character
let is_whitespace = item.is_whitespace();
let is_zero_width = UnicodeWidthChar::width(item).map_or(true, |width| width == 0);
let item = if !is_whitespace && is_zero_width {
char::REPLACEMENT_CHARACTER
} else {
item
};
item.encode_utf8(&mut buffer);
if let Err(err) = self.writer.write_all(&buffer[..item.len_utf8()]) {
self.error = Err(err);
return Err(fmt::Error);
}
}
Ok(())
}
}
/// The [Formatter] is the `rome_console` equivalent to [std::fmt::Formatter]:
/// it's never constructed directly by consumers, and can only be used through
/// the mutable reference passed to implementations of the [Display] trait.
/// It manages the state of the markup to print, and implementations of
/// [Display] can call into its methods to append content into the current
/// printing session
pub struct Formatter<'fmt> {
/// Stack of markup elements currently applied to the text being printed
state: MarkupElements<'fmt>,
/// Inner IO writer this [Formatter] will print text into
writer: &'fmt mut dyn Write,
}
impl<'fmt> Formatter<'fmt> {
/// Create a new instance of the [Formatter] using the provided `writer` for printing
pub fn new(writer: &'fmt mut dyn Write) -> Self {
Self {
state: MarkupElements::Root,
writer,
}
}
/// Return a new instance of the [Formatter] with `elements` appended to its element stack
fn with_elements<'b>(&'b mut self, elements: &'b [MarkupElement]) -> Formatter<'b> {
Formatter {
state: MarkupElements::Node(&self.state, elements),
writer: self.writer,
}
}
/// Write a piece of markup into this formatter
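///
/// For instance, a minimal sketch using the crate's `markup!` macro:
///
/// ```
/// # use std::io;
/// # use rome_console::{fmt::Formatter, markup};
/// # fn demo(fmt: &mut Formatter) -> io::Result<()> {
/// fmt.write_markup(markup! { <Emphasis>"hello"</Emphasis> })
/// # }
/// ```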
pub fn write_markup(&mut self, markup: Markup) -> io::Result<()> {
for node in markup.0 {
let mut fmt = self.with_elements(node.elements);
node.content.fmt(&mut fmt)?;
}
Ok(())
}
/// Write a slice of text into this formatter
pub fn write_str(&mut self, content: &str) -> io::Result<()> {
self.writer.write_str(&self.state, content)
}
/// Write formatted text into this formatter
pub fn write_fmt(&mut self, content: fmt::Arguments) -> io::Result<()> {
self.writer.write_fmt(&self.state, content)
}
}
/// Formatting trait for types to be displayed as markup, the `rome_console`
/// equivalent to [std::fmt::Display]
///
/// # Example
/// Implementing `Display` on a custom struct
/// ```
/// use std::io;
/// use rome_console::{fmt::{Display, Formatter}, markup};
///
/// struct Warning(String);
///
/// impl Display for Warning {
/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
/// fmt.write_markup(markup! {
/// <Warn>{self.0}</Warn>
/// })
/// }
/// }
///
/// let warning = Warning(String::from("content"));
/// markup! {
/// <Emphasis>{warning}</Emphasis>
/// };
/// ```
pub trait Display {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>;
}
// Blanket implementations of Display for reference types
impl<'a, T> Display for &'a T
where
T: Display + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
impl<'a, T> Display for Cow<'a, T>
where
T: Display + ToOwned + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
// Simple implementations of Display calling through to write_str for types
// that implement Deref<str>
impl Display for str {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
impl Display for String {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
// Implement Display for Markup and Rust format Arguments
impl<'a> Display for Markup<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_markup(*self)
}
}
impl<'a> Display for std::fmt::Arguments<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_fmt(*self)
}
}
/// Implement [Display] for types that implement [std::fmt::Display] by calling
/// through to [Formatter::write_fmt]
macro_rules! impl_std_display {
($ty:ty) => {
impl Display for $ty {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
write!(fmt, "{self}")
}
}
};
}
impl_std_display!(char);
impl_std_display!(i8);
impl_std_display!(i16);
impl_std_display!(i32);
impl_std_display!(i64);
impl_std_display!(i128);
impl_std_display!(isize);
impl_std_display!(u8);
impl_std_display!(u16);
impl_std_display!(u32);
impl_std_display!(u64);
impl_std_display!(u128);
impl_std_display!(usize);
impl Display for Duration {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
use crate as rome_console;
let secs = self.as_secs();
if secs > 1 {
return fmt.write_markup(markup! {
{secs}<Dim>"s"</Dim>
});
}
let millis = self.as_millis();
if millis > 1 {
return fmt.write_markup(markup! {
{millis}<Dim>"ms"</Dim>
});
}
let micros = self.as_micros();
if micros > 1 {
return fmt.write_markup(markup! {
{micros}<Dim>"µs"</Dim>
});
}
let nanos = self.as_nanos();
fmt.write_markup(markup! {
{nanos}<Dim>"ns"</Dim>
})
}
}
#[cfg(test)]
mod tests {
use std::{fmt::Write, str::from_utf8};
use super::SanitizeAdapter;
#[test]
fn test_sanitize() {
// Sanitization should leave whitespace control characters (space,
// tabs, newline,...) and non-ASCII unicode characters as-is but
// redact zero-width characters (RTL override, null character, bell,
// zero-width space,...)
const INPUT: &str = "t\tes t\r\n\u{202D}t\0es\x07t\u{202E}\nt\u{200B}es🐛t";
const OUTPUT: &str = "t\tes t\r\n\u{FFFD}t\u{FFFD}es\u{FFFD}t\u{FFFD}\nt\u{FFFD}es🐛t";
let mut buffer = Vec::new();
{
let mut adapter = SanitizeAdapter {
writer: &mut buffer,
error: Ok(()),
};
adapter.write_str(INPUT).unwrap();
adapter.error.unwrap();
}
assert_eq!(from_utf8(&buffer).unwrap(), OUTPUT);
}
}
fmt.rs | use std::{
borrow::Cow,
fmt::{self, Write as _},
io,
time::Duration,
};
use termcolor::{ColorSpec, WriteColor};
use unicode_width::UnicodeWidthChar;
use crate::{markup, Markup, MarkupElement};
/// A stack-allocated linked-list of [MarkupElement] slices
pub enum MarkupElements<'a> {
Root,
Node(&'a Self, &'a [MarkupElement]),
}
impl<'a> MarkupElements<'a> {
/// Iterates on all the element slices depth-first
pub fn for_each(&self, func: &mut impl FnMut(&'a [MarkupElement])) {
if let Self::Node(parent, elem) = self {
parent.for_each(func);
func(elem);
}
}
}
pub trait Write {
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>;
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>;
}
/// Applies the current format in `state` to `writer`, calls `func` to
/// print a piece of text, then reset the printing format
fn with_format<W>(
writer: &mut W,
state: &MarkupElements,
func: impl FnOnce(&mut W) -> io::Result<()>,
) -> io::Result<()>
where
W: WriteColor,
{
let mut color = ColorSpec::new();
state.for_each(&mut |elements| {
for element in elements {
element.update_color(&mut color);
}
});
if let Err(err) = writer.set_color(&color) {
writer.reset()?;
return Err(err);
}
let result = func(writer);
writer.reset()?;
result
}
/// Adapter struct implementing [Write] over types implementing [WriteColor]
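///
/// A construction sketch, assuming output goes to stdout through
/// termcolor's `StandardStream` (any [WriteColor] type works):
///
/// ```
/// # use termcolor::{ColorChoice, StandardStream};
/// # use rome_console::fmt::{Formatter, Termcolor};
/// let mut writer = Termcolor(StandardStream::stdout(ColorChoice::Auto));
/// let mut fmt = Formatter::new(&mut writer);
/// ```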
pub struct Termcolor<W>(pub W);
impl<W> Write for Termcolor<W>
where
W: WriteColor,
{
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_str(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
// SanitizeAdapter can only fail if the underlying
// writer returns an error
unreachable!()
}
}
}
})
}
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_fmt(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"a Display formatter returned an error",
))
}
}
}
})
}
}
/// Adapts [fmt::Write] calls to [io::Write] with sanitization,
/// implemented as an internal struct to avoid exposing [fmt::Write] on
/// [Termcolor]
struct SanitizeAdapter<W> {
writer: W,
error: io::Result<()>,
}
impl<W: io::Write> fmt::Write for SanitizeAdapter<W> {
fn write_str(&mut self, content: &str) -> fmt::Result {
let mut buffer = [0; 4];
for item in content.chars() {
// Replace non-whitespace, zero-width characters with the Unicode replacement character
let is_whitespace = item.is_whitespace();
let is_zero_width = UnicodeWidthChar::width(item).map_or(true, |width| width == 0);
let item = if !is_whitespace && is_zero_width {
char::REPLACEMENT_CHARACTER
} else {
item
};
item.encode_utf8(&mut buffer);
if let Err(err) = self.writer.write_all(&buffer[..item.len_utf8()]) {
self.error = Err(err);
return Err(fmt::Error);
}
}
Ok(())
}
}
/// The [Formatter] is the `rome_console` equivalent to [std::fmt::Formatter]:
/// it's never constructed directly by consumers, and can only be used through
/// the mutable reference passed to implementations of the [Display] trait.
/// It manages the state of the markup to print, and implementations of
/// [Display] can call into its methods to append content into the current
/// printing session
pub struct Formatter<'fmt> {
/// Stack of markup elements currently applied to the text being printed
state: MarkupElements<'fmt>,
/// Inner IO writer this [Formatter] will print text into
writer: &'fmt mut dyn Write,
}
impl<'fmt> Formatter<'fmt> {
/// Create a new instance of the [Formatter] using the provided `writer` for printing
pub fn new(writer: &'fmt mut dyn Write) -> Self {
Self {
state: MarkupElements::Root,
writer,
}
}
/// Return a new instance of the [Formatter] with `elements` appended to its element stack
fn with_elements<'b>(&'b mut self, elements: &'b [MarkupElement]) -> Formatter<'b> {
Formatter {
state: MarkupElements::Node(&self.state, elements),
writer: self.writer,
}
}
/// Write a piece of markup into this formatter
pub fn write_markup(&mut self, markup: Markup) -> io::Result<()> {
for node in markup.0 {
let mut fmt = self.with_elements(node.elements);
node.content.fmt(&mut fmt)?;
}
Ok(())
}
/// Write a slice of text into this formatter
pub fn write_str(&mut self, content: &str) -> io::Result<()> {
self.writer.write_str(&self.state, content)
}
/// Write formatted text into this formatter
pub fn write_fmt(&mut self, content: fmt::Arguments) -> io::Result<()> {
self.writer.write_fmt(&self.state, content)
}
}
/// Formatting trait for types to be displayed as markup, the `rome_console`
/// equivalent to [std::fmt::Display]
///
/// # Example
/// Implementing `Display` on a custom struct
/// ```
/// use std::io;
/// use rome_console::{fmt::{Display, Formatter}, markup};
///
/// struct Warning(String);
///
/// impl Display for Warning {
/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
/// fmt.write_markup(markup! {
/// <Warn>{self.0}</Warn>
/// })
/// }
/// }
///
/// let warning = Warning(String::from("content"));
/// markup! {
/// <Emphasis>{warning}</Emphasis>
/// };
/// ```
pub trait Display {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>;
}
// Blanket implementations of Display for reference types
impl<'a, T> Display for &'a T
where
T: Display + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
impl<'a, T> Display for Cow<'a, T>
where
T: Display + ToOwned + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
// Simple implementations of Display calling through to write_str for types
// that implement Deref<str>
impl Display for str {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
impl Display for String {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
// Implement Display for Markup and Rust format Arguments
impl<'a> Display for Markup<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_markup(*self)
}
}
impl<'a> Display for std::fmt::Arguments<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_fmt(*self)
}
}
/// Implement [Display] for types that implement [std::fmt::Display] by calling
/// through to [Formatter::write_fmt]
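///
/// Each expansion mirrors the hand-written impls above; for example, a
/// hypothetical `impl_std_display!(bool);` would forward `bool` values
/// through `write!(fmt, "{self}")`.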
macro_rules! impl_std_display {
($ty:ty) => {
impl Display for $ty {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
write!(fmt, "{self}")
}
}
};
}
impl_std_display!(char);
impl_std_display!(i8);
impl_std_display!(i16);
impl_std_display!(i32);
impl_std_display!(i64);
impl_std_display!(i128);
impl_std_display!(isize);
impl_std_display!(u8);
impl_std_display!(u16);
impl_std_display!(u32);
impl_std_display!(u64);
impl_std_display!(u128);
impl_std_display!(usize);
impl Display for Duration {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
use crate as rome_console;
let secs = self.as_secs();
if secs > 1 {
return fmt.write_markup(markup! {
{secs}<Dim>"s"</Dim>
});
}
let millis = self.as_millis();
if millis > 1 {
return fmt.write_markup(markup! {
{millis}<Dim>"ms"</Dim>
});
}
let micros = self.as_micros();
if micros > 1 {
return fmt.write_markup(markup! {
{micros}<Dim>"µs"</Dim>
});
}
let nanos = self.as_nanos();
fmt.write_markup(markup! {
{nanos}<Dim>"ns"</Dim>
})
}
}
#[cfg(test)]
mod tests {
use std::{fmt::Write, str::from_utf8};
use super::SanitizeAdapter;
#[test]
fn test_sanitize() {
// Sanitization should leave whitespace control characters (space,
// tabs, newline,...) and non-ASCII unicode characters as-is but
// redact zero-width characters (RTL override, null character, bell,
// zero-width space,...)
const INPUT: &str = "t\tes t\r\n\u{202D}t\0es\x07t\u{202E}\nt\u{200B}es🐛t";
const OUTPUT: &str = "t\tes t\r\n\u{FFFD}t\u{FFFD}es\u{FFFD}t\u{FFFD}\nt\u{FFFD}es🐛t";
let mut buffer = Vec::new();
{
let mut adapter = SanitizeAdapter {
writer: &mut buffer,
error: Ok(()),
};
adapter.write_str(INPUT).unwrap();
adapter.error.unwrap();
}
assert_eq!(from_utf8(&buffer).unwrap(), OUTPUT);
}
}
fmt.rs | use std::{
borrow::Cow,
fmt::{self, Write as _},
io,
time::Duration,
};
use termcolor::{ColorSpec, WriteColor};
use unicode_width::UnicodeWidthChar;
use crate::{markup, Markup, MarkupElement};
/// A stack-allocated linked-list of [MarkupElement] slices
pub enum MarkupElements<'a> {
Root,
Node(&'a Self, &'a [MarkupElement]),
}
impl<'a> MarkupElements<'a> {
/// Iterates on all the element slices depth-first
pub fn for_each(&self, func: &mut impl FnMut(&'a [MarkupElement])) {
if let Self::Node(parent, elem) = self {
parent.for_each(func);
func(elem);
}
}
}
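/// Sink for markup-formatted text: implementors receive the stack of
/// currently applied [MarkupElement]s alongside each piece of content and
/// decide how to render the styling (see [Termcolor] below).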
pub trait Write {
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>;
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>;
}
/// Applies the current format in `state` to `writer`, calls `func` to
/// print a piece of text, then reset the printing format
fn with_format<W>(
writer: &mut W,
state: &MarkupElements,
func: impl FnOnce(&mut W) -> io::Result<()>,
) -> io::Result<()>
where
W: WriteColor,
{
let mut color = ColorSpec::new();
state.for_each(&mut |elements| {
for element in elements {
element.update_color(&mut color);
}
});
if let Err(err) = writer.set_color(&color) {
writer.reset()?;
return Err(err);
}
let result = func(writer);
writer.reset()?;
result
}
/// Adapter struct implementing [Write] over types implementing [WriteColor]
pub struct Termcolor<W>(pub W);
impl<W> Write for Termcolor<W>
where
W: WriteColor,
{
fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_str(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
// SanitizeAdapter can only fail if the underlying
// writer returns an error
unreachable!()
}
}
}
})
}
fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> {
with_format(&mut self.0, elements, |writer| {
let mut adapter = SanitizeAdapter {
writer,
error: Ok(()),
};
match adapter.write_fmt(content) {
Ok(()) => Ok(()),
Err(..) => {
if adapter.error.is_err() {
adapter.error
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"a Display formatter returned an error",
))
}
}
}
})
}
}
/// Adapts [fmt::Write] calls to [io::Write] with sanitization,
/// implemented as an internal struct to avoid exposing [fmt::Write] on
/// [Termcolor]
struct SanitizeAdapter<W> {
writer: W,
error: io::Result<()>,
}
impl<W: io::Write> fmt::Write for SanitizeAdapter<W> {
fn write_str(&mut self, content: &str) -> fmt::Result {
let mut buffer = [0; 4];
for item in content.chars() {
// Replace non-whitespace, zero-width characters with the Unicode replacement character
let is_whitespace = item.is_whitespace();
let is_zero_width = UnicodeWidthChar::width(item).map_or(true, |width| width == 0);
let item = if !is_whitespace && is_zero_width {
char::REPLACEMENT_CHARACTER
} else {
item
};
item.encode_utf8(&mut buffer);
if let Err(err) = self.writer.write_all(&buffer[..item.len_utf8()]) {
self.error = Err(err);
return Err(fmt::Error);
}
}
Ok(())
}
}
/// The [Formatter] is the `rome_console` equivalent to [std::fmt::Formatter]:
/// it's never constructed directly by consumers, and can only be used through
/// the mutable reference passed to implementations of the [Display] trait.
/// It manages the state of the markup to print, and implementations of
/// [Display] can call into its methods to append content into the current
/// printing session
pub struct Formatter<'fmt> {
/// Stack of markup elements currently applied to the text being printed
state: MarkupElements<'fmt>,
/// Inner IO writer this [Formatter] will print text into
writer: &'fmt mut dyn Write,
}
impl<'fmt> Formatter<'fmt> {
/// Create a new instance of the [Formatter] using the provided `writer` for printing
pub fn new(writer: &'fmt mut dyn Write) -> Self {
Self {
state: MarkupElements::Root,
writer,
}
}
/// Return a new instance of the [Formatter] with `elements` appended to its element stack
fn with_elements<'b>(&'b mut self, elements: &'b [MarkupElement]) -> Formatter<'b> {
Formatter {
state: MarkupElements::Node(&self.state, elements),
writer: self.writer,
}
}
/// Write a piece of markup into this formatter
pub fn write_markup(&mut self, markup: Markup) -> io::Result<()> {
for node in markup.0 {
let mut fmt = self.with_elements(node.elements);
node.content.fmt(&mut fmt)?;
}
Ok(())
}
/// Write a slice of text into this formatter
pub fn write_str(&mut self, content: &str) -> io::Result<()> {
self.writer.write_str(&self.state, content)
}
/// Write formatted text into this formatter
pub fn write_fmt(&mut self, content: fmt::Arguments) -> io::Result<()> {
self.writer.write_fmt(&self.state, content)
}
}
/// Formatting trait for types to be displayed as markup, the `rome_console`
/// equivalent to [std::fmt::Display]
///
/// # Example
/// Implementing `Display` on a custom struct
/// ```
/// use std::io;
/// use rome_console::{fmt::{Display, Formatter}, markup};
///
/// struct Warning(String);
///
/// impl Display for Warning {
/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
/// fmt.write_markup(markup! {
/// <Warn>{self.0}</Warn>
/// })
/// }
/// }
///
/// let warning = Warning(String::from("content"));
/// markup! {
/// <Emphasis>{warning}</Emphasis>
/// };
/// ```
pub trait Display {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>;
}
// Blanket implementations of Display for reference types
impl<'a, T> Display for &'a T
where
T: Display + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
impl<'a, T> Display for Cow<'a, T>
where
T: Display + ToOwned + ?Sized,
{
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
T::fmt(self, fmt)
}
}
// Simple implementations of Display calling through to write_str for types
// that implement Deref<str>
impl Display for str {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
impl Display for String {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_str(self)
}
}
// Implement Display for Markup and Rust format Arguments
impl<'a> Display for Markup<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_markup(*self)
}
}
impl<'a> Display for std::fmt::Arguments<'a> {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
fmt.write_fmt(*self)
}
}
/// Implement [Display] for types that implement [std::fmt::Display] by calling
/// through to [Formatter::write_fmt]
macro_rules! impl_std_display {
($ty:ty) => {
impl Display for $ty {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
write!(fmt, "{self}")
}
}
};
}
impl_std_display!(char);
impl_std_display!(i8);
impl_std_display!(i16);
impl_std_display!(i32);
impl_std_display!(i64);
impl_std_display!(i128);
impl_std_display!(isize);
impl_std_display!(u8);
impl_std_display!(u16);
impl_std_display!(u32);
impl_std_display!(u64);
impl_std_display!(u128);
impl_std_display!(usize);
impl Display for Duration {
fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
use crate as rome_console;
let secs = self.as_secs();
if secs > 1 {
return fmt.write_markup(markup! {
{secs}<Dim>"s"</Dim>
});
}
let millis = self.as_millis();
if millis > 1 {
return fmt.write_markup(markup! {
{millis}<Dim>"ms"</Dim>
});
}
let micros = self.as_micros();
if micros > 1 {
return fmt.write_markup(markup! {
{micros}<Dim>"µs"</Dim>
});
}
let nanos = self.as_nanos();
fmt.write_markup(markup! {
{nanos}<Dim>"ns"</Dim>
})
}
}
#[cfg(test)]
mod tests {
use std::{fmt::Write, str::from_utf8};
use super::SanitizeAdapter;
#[test]
fn test_sanitize() {
// Sanitization should leave whitespace control characters (space,
// tabs, newline,...) and non-ASCII unicode characters as-is but
// redact zero-width characters (RTL override, null character, bell,
// zero-width space,...)
const INPUT: &str = "t\tes t\r\n\u{202D}t\0es\x07t\u{202E}\nt\u{200B}es🐛t";
const OUTPUT: &str = "t\tes t\r\n\u{FFFD}t\u{FFFD}es\u{FFFD}t\u{FFFD}\nt\u{FFFD}es🐛t";
let mut buffer = Vec::new();
{
let mut adapter = SanitizeAdapter {
writer: &mut buffer,
error: Ok(()),
};
adapter.write_str(INPUT).unwrap();
adapter.error.unwrap();
}
assert_eq!(from_utf8(&buffer).unwrap(), OUTPUT);
}
}
serial.rs | // Copyright 2016 taskqueue developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::boxed::FnBox;
use std::fmt;
use std::thread;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use mioco::{self, ExitStatus, Config, Mioco};
use mioco::mail::*;
use future::{Future, FutureInternal, FutureGuard};
use group::Group;
use queue::{Queue, QueueId, LoopResult};
use util::mioco_handler::{Userdata, blocking_mioco_run_loop, new_coroutine};
use util::unsafe_wrap::NotThreadSafe;
use util::stack::Stack;
static ID: AtomicUsize = ATOMIC_USIZE_INIT;
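// Control messages consumed by a queue's run loop: `Run` carries a boxed
// task to execute, `Wait` re-enqueues the exit notifier of a coroutine
// parked after a deadlock notification, and `End` shuts the loop down.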
enum Command
{
Run(Box<FnBox() + Send + 'static>),
Wait(MailboxInnerEnd<ExitStatus>),
End,
}
/// Queue executing Tasks serially, non-overlapping in queued Order
///
/// ## Properties
/// - executes tasks in serial order
/// - no tasks may overlap
/// - they never change their native background thread (but may share it)
/// - the tasks are executed in order of queuing
/// - safety against deadlocks from recursive queueing (see example)
///
/// Through these guarantees SerialQueues may be bound to a type that is **not** Send or Sync
/// and provide easy thread-safe access to this critical resource.
/// Such a SerialQueue is called [*BoundSerialQueue*](./struct.BoundSerialQueue.html).
///
/// ## Example
///
/// ```rust
/// # use taskqueue::*;
/// init_main(|main| {
/// let thread_one = SerialQueue::new();
/// let thread_two = SerialQueue::new();
///
/// let future_one = thread_one.async(|| {
/// 42
/// });
/// let future_two = thread_two.async(|| {
/// 96
/// });
///
/// println!("Although this is happening in main,");
/// main.async(|| {
/// println!("this task is running before...");
/// });
/// main.sync(|| {
/// println!("...this task and...");
/// assert_eq!(future_one.get() + future_two.get(), 138);
/// });
/// println!("...this is running last");
/// });
/// ```
pub struct SerialQueue
{
id: usize,
tx: MailboxOuterEnd<Command>,
deadlock_tx: MailboxOuterEnd<()>,
}
impl PartialEq for SerialQueue
{
fn eq(&self, other: &Self) -> bool
{
self.id == other.id
}
}
impl fmt::Debug for SerialQueue
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "SerialQueue ({})", self.id)
}
}
unsafe impl Send for SerialQueue
{}
unsafe impl Sync for SerialQueue
{}
impl SerialQueue
{
/// Create a new SerialQueue and assign it to the global thread pool
pub fn new() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
new_coroutine(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Queue created ({:?})", queue);
queue
}
/// Create a new SerialQueue and assign it solely to a newly created OS Thread
///
/// A SerialQueue created through this method will spawn a new native OS Thread
/// and the queue will be the only one utilizing it. The thread will be destroyed
/// when the queue is dropped.
///
/// The purpose of this constructor is to provide a way to use blocking IO with TaskQueue.
/// The use of this method however is discouraged, as the new thread may influence
/// the scheduler negatively, and evented IO, where possible, performs a lot better
/// in combination with the TaskQueue library.
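///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// // A sketch: park blocking work on a dedicated OS thread.
/// let io_queue = SerialQueue::new_native();
/// let contents = io_queue.sync(|| {
///     // blocking IO would go here
///     "done"
/// });
/// # assert_eq!(contents, "done");
/// ```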
pub fn new_native() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
thread::spawn(move || {
Mioco::new_configured({
let mut config = Config::new();
config.set_thread_num(1);
config
}).start(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Native Queue created ({:?})", queue);
queue
}
fn do_loop(queue_tx: MailboxOuterEnd<Command>,
rx: MailboxInnerEnd<Command>,
deadlock_rx: MailboxInnerEnd<()>)
{
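// Deadlock handling, as implemented below: exactly one task coroutine runs
// at a time. If the running task blocks on a Future produced by this same
// queue, the Future signals `deadlock_rx`; the coroutine is then parked by
// pushing a `Command::Wait` with its exit notifier back onto the queue, so
// the loop can continue with the next command.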
debug!("loop: spawing serial loop");
loop {
trace!("loop: next iteration");
match rx.read() {
Command::End => break,
Command::Wait(routine) => {
trace!("loop: handling previous deadlocked coroutine");
let tx_clone = queue_tx.clone();
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
Command::Run(task) => {
let tx_clone = queue_tx.clone();
mioco::set_children_userdata(Some(Userdata::SameThread));
let routine = mioco::spawn_ext(move || {
trace!("loop: spawned new coroutine for task");
task.call_box(());
Ok(())
})
.exit_notificator();
trace!("loop: wait for deadlock notification or coroutine finish");
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
}
}
debug!("loop: queue ended");
}
/// Bind this queue to a variable
///
/// This function allows you to create a `BoundSerialQueue`.
/// Its purpose is to bind variables to a queue, so they can be used by the tasks submitted.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let x = 5;
/// let bound = queue.with(move || x);
/// bound.scoped_with(|x| println!("{}", x));
/// // x gets dropped inside the queues thread before the queue gets dropped
/// ```
///
/// You can create multiple bindings for the same queue.
/// Through tuples you may bind multiple variables in a single binding.
///
/// It is even possible to move the creation of the bound variable into the queue by creating
/// it inside the passed constructor, which is then executed on the queue.
/// And because SerialQueues never change their underlying OS Thread,
/// this allows to use variables that are not Send and Sync in a thread-safe but shared way.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// # fn my_ffi_function() -> *mut () { &mut () };
/// let bound = queue.with(|| {
/// let raw_ptr = my_ffi_function();
/// raw_ptr
/// });
/// bound.scoped_with(|raw_ptr| println!("{}", raw_ptr.is_null()));
/// // raw_ptr gets dropped inside the queues thread.
/// // This way raw_ptr is never moved between threads.
/// ```
pub fn with<'queue, R:'static, F>(&'queue self,
constructor: F)
-> BoundSerialQueue<'queue, R>
where F: FnOnce() -> R + Send
{
let binding = self.sync(move || NotThreadSafe::new(constructor()));
BoundSerialQueue {
queue: self,
binding: binding,
}
}
}
impl Queue for SerialQueue
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send +'static,
F: FnOnce() -> R + Send +'static
{
let (tx, rx) = mailbox();
let operation: Box<FnBox() + Send + 'static> = Stack::assemble(self, move || {
tx.send(operation());
});
debug!("Queue ({:?}) queued task", self);
self.tx.send(Command::Run(operation));
Future::new_from_serial(self, Some(self.deadlock_tx.clone()), rx)
}
}
/// A bound SerialQueue holding a queue-bound variable
///
/// Create a BoundSerialQueue using `SerialQueue::with`.
/// BoundSerialQueues hold variables that may be used by
/// tasks executed on this queue through `scoped_with`, `sync_with`,
/// `foreach_with` or `loop_while_with`.
///
/// `async_with` cannot be provided, as the bound variable is
/// dropped when the BoundSerialQueue gets dropped.
///
/// Internally, BoundSerialQueues refer to the same queue they were created from.
/// Multiple BoundSerialQueues may exist for one queue at once.
pub struct BoundSerialQueue<'queue, T:'static>
{
queue: &'queue SerialQueue,
binding: NotThreadSafe<T>,
}
impl<'queue, T:'static> fmt::Debug for BoundSerialQueue<'queue, T>
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "BoundSerialQueue ({:p})", self)
}
}
impl<'queue, T:'static> BoundSerialQueue<'queue, T>
{
/// Like `Queue::scoped` but provides a mutable reference to the bound variable
///
/// # Safety
///
/// The same rules as for `Queue::scoped` apply.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "Hello".to_string() });
/// let name = "Stack".to_string();
/// bound.scoped_with(|message| { println!("{} {}!", message, name) });
/// ```
pub fn scoped_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where R: Send +'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.scoped(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::sync` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "a bound queue".to_string() });
/// bound.sync_with(|name| { println!("Hello {}", name) });
/// ```
pub fn sync_with<R, F>(&'queue self, operation: F) -> R
where R: Send + 'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.sync(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::foreach` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| 2);
/// let doubled: Vec<i32> = bound.foreach_with((0..20), |factor, x| x*factor).wait().collect();
/// # assert_eq!((0..20).map(|x| x*2).collect::<Vec<i32>>(), doubled);
/// ```
pub fn foreach_with<B, R, I, F>(&'queue self, mut iter: I, operation: F) -> Group<R>
where B: Send,
R: Send + 'queue,
I: Iterator<Item = B> + Send,
F: Fn(&'queue T, B) -> R + Send + Sync + 'queue
{
let mut group = Group::new();
loop {
match iter.next() {
Some(x) => {
let op = &operation;
let binding = &self.binding;
group.scoped(self, move || op(unsafe { binding.get_mut() }, x))
},
None => break,
}
}
group
}
/// Like `Queue::loop_while` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// use std::mem;
///
/// let queue = SerialQueue::new();
/// let bound = queue.with(|| (15, 25));
///
/// let greatest_common_divisor = bound.loop_while_with(|tuple| {
/// let x = tuple.0;
/// let y = tuple.1;
///
/// if y == 0 {
/// LoopResult::Done(x.clone())
/// } else {
/// let remainder = x % y;
/// let mut new_tuple = (y, remainder);
/// mem::swap(tuple, &mut new_tuple);
/// LoopResult::Continue
/// }
/// }).get();
/// #
/// # assert_eq!(5, greatest_common_divisor);
/// ```
pub fn loop_while_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where F: Fn(&'queue mut T) -> LoopResult<R> + Send + Sync + 'queue,
R: Send +'static,
{
self.queue.loop_while(move || operation(unsafe { self.binding.get_mut() }))
}
}
impl<'a, T:'static> Queue for BoundSerialQueue<'a, T>
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send +'static,
F: FnOnce() -> R + Send +'static
{
self.queue.async(operation)
}
}
impl<'queue, T:'static> Drop for BoundSerialQueue<'queue, T>
{
fn drop(&mut self)
{
let binding = self.binding.clone();
self.queue.async(move || {
unsafe {
binding.drop();
}
});
}
}
impl QueueId for SerialQueue
{
fn id(&self) -> usize
{
self.id
}
}
/// Convert the main thread into a SerialQueue
///
/// This function wraps the current thread into a SerialQueue that
/// is passed to the executed function, blocking the current thread
/// until the created Queue is done.
///
/// This function is only intended to be used on the main thread and
/// library creators should never need to use it.
///
/// If you need a queue based on a newly created OS thread use `SerialQueue::new_native()`.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// fn main() {
/// init_main(|main_queue| {
/// // start using it!
/// })
/// }
/// ```
pub fn init_main<F: FnOnce(SerialQueue) + Send +'static>(start: F)
{
blocking_mioco_run_loop(move || {
mioco::set_children_userdata(Some(Userdata::RoundRobin));
let (tx, rx) = mailbox::<Command>();
let (deadlock_tx, deadlock_rx) = mailbox::<()>();
let new_queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx.clone(),
deadlock_tx: deadlock_tx.clone(),
};
tx.send(Command::Run(
Stack::assemble_main(new_queue, start)
));
info!("Main Queue ready!");
SerialQueue::do_loop(tx, rx, deadlock_rx);
Ok(())
})
}
impl Drop for SerialQueue
{
fn drop(&mut self)
{
trace!("Dropping {:?}", self);
self.tx.send(Command::End);
}
}
serial.rs | // Copyright 2016 taskqueue developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::boxed::FnBox;
use std::fmt;
use std::thread;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use mioco::{self, ExitStatus, Config, Mioco};
use mioco::mail::*;
use future::{Future, FutureInternal, FutureGuard};
use group::Group;
use queue::{Queue, QueueId, LoopResult};
use util::mioco_handler::{Userdata, blocking_mioco_run_loop, new_coroutine};
use util::unsafe_wrap::NotThreadSafe;
use util::stack::Stack;
static ID: AtomicUsize = ATOMIC_USIZE_INIT;
enum Command
{
Run(Box<FnBox() + Send + 'static>),
Wait(MailboxInnerEnd<ExitStatus>),
End,
}
/// Queue executing Tasks serially, non-overlapping in queued Order
///
/// ## Properties
/// - executes tasks in serial order
/// - no tasks may overlap
/// - they never change their native background thread (but may share it)
/// - the tasks are executed in order of queuing
/// - safety against deadlocks from recursive queueing (see example)
///
/// Through these guarantees SerialQueues may be bound to a type that is **not** Send or Sync
/// and provide easy thread-safe access to this critical resource.
/// Such a SerialQueue is called [*BoundSerialQueue*](./struct.BoundSerialQueue.html).
///
/// ## Example
///
/// ```rust
/// # use taskqueue::*;
/// init_main(|main| {
/// let thread_one = SerialQueue::new();
/// let thread_two = SerialQueue::new();
///
/// let future_one = thread_one.async(|| {
/// 42
/// });
/// let future_two = thread_two.async(|| {
/// 96
/// });
///
/// println!("Although this is happening in main,");
/// main.async(|| {
/// println!("this task is running before...");
/// });
/// main.sync(|| {
/// println!("...this task and...");
/// assert_eq!(future_one.get() + future_two.get(), 138);
/// });
/// println!("...this is running last");
/// });
/// ```
pub struct SerialQueue
{
id: usize,
tx: MailboxOuterEnd<Command>,
deadlock_tx: MailboxOuterEnd<()>,
}
impl PartialEq for SerialQueue
{
fn eq(&self, other: &Self) -> bool
{
self.id == other.id
}
}
impl fmt::Debug for SerialQueue
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "SerialQueue ({})", self.id)
}
}
unsafe impl Send for SerialQueue
{}
unsafe impl Sync for SerialQueue
{}
impl SerialQueue
{
/// Create a new SerialQueue and assign it to the global thread pool
pub fn new() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
new_coroutine(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Queue created ({:?})", queue);
queue
}
/// Create a new SerialQueue and assign it solely to a newly created OS Thread
///
/// A SerialQueue created through this method will spawn a new native OS Thread
/// and the queue will be the only one utilizing it. The thread will be destroyed
/// when the queue is dropped.
///
/// The purpose of this constructor is to provide a way to use blocking IO with TaskQueue.
/// The use of this method however is discouraged, as the new thread may influence
/// the scheduler negatively, and evented IO, where possible, performs a lot better
/// in combination with the TaskQueue library.
pub fn new_native() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
thread::spawn(move || {
Mioco::new_configured({
let mut config = Config::new();
config.set_thread_num(1);
config
}).start(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Native Queue created ({:?})", queue);
queue
}
fn do_loop(queue_tx: MailboxOuterEnd<Command>,
rx: MailboxInnerEnd<Command>,
deadlock_rx: MailboxInnerEnd<()>)
{
debug!("loop: spawing serial loop");
loop {
trace!("loop: next iteration");
match rx.read() {
Command::End => break,
Command::Wait(routine) => {
trace!("loop: handling previous deadlocked coroutine");
let tx_clone = queue_tx.clone();
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
Command::Run(task) => {
let tx_clone = queue_tx.clone();
mioco::set_children_userdata(Some(Userdata::SameThread));
let routine = mioco::spawn_ext(move || {
trace!("loop: spawned new coroutine for task");
task.call_box(());
Ok(())
})
.exit_notificator();
trace!("loop: wait for deadlock notification or coroutine finish");
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
}
}
debug!("loop: queue ended");
}
/// Bind this queue to a variable
///
/// This function allows you to create a `BoundSerialQueue`.
/// Its purpose is to bind variables to a queue, so they can be used by the tasks submitted.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let x = 5;
/// let bound = queue.with(move || x);
/// bound.scoped_with(|x| println!("{}", x));
/// // x gets dropped inside the queues thread before the queue gets dropped
/// ```
///
/// You can create multiple bindings for the same queue.
/// Through tuples you may bind multiple variables in a single binding.
///
/// It is even possible to move the creation of the bound variable into the queue by creating
/// it inside the passed constructor, which is then executed on the queue.
/// And because SerialQueues never change their underlying OS Thread,
/// this allows to use variables that are not Send and Sync in a thread-safe but shared way.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// # fn my_ffi_function() -> *mut () { &mut () };
/// let bound = queue.with(|| {
/// let raw_ptr = my_ffi_function();
/// raw_ptr
/// });
/// bound.scoped_with(|raw_ptr| println!("{}", raw_ptr.is_null()));
/// // raw_ptr gets dropped inside the queues thread.
/// // This way raw_ptr is never moved between threads.
/// ```
pub fn with<'queue, R:'static, F>(&'queue self,
constructor: F)
-> BoundSerialQueue<'queue, R>
where F: FnOnce() -> R + Send
{
let binding = self.sync(move || NotThreadSafe::new(constructor()));
BoundSerialQueue {
queue: self,
binding: binding,
}
}
}
impl Queue for SerialQueue
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send +'static,
F: FnOnce() -> R + Send +'static
{
let (tx, rx) = mailbox();
let operation: Box<FnBox() + Send + 'static> = Stack::assemble(self, move || {
tx.send(operation());
});
debug!("Queue ({:?}) queued task", self);
self.tx.send(Command::Run(operation));
Future::new_from_serial(self, Some(self.deadlock_tx.clone()), rx)
}
}
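// Usage sketch for the impl above: `async` returns immediately with a
// `Future`, and `get` blocks until the queued task has run.
//
//     let queue = SerialQueue::new();
//     let answer = queue.async(|| 6 * 7);
//     assert_eq!(answer.get(), 42);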
/// A bound SerialQueue holding a queue-bound variable
///
/// Create a BoundSerialQueue using `SerialQueue::with`.
/// BoundSerialQueues hold variables that may be used by
/// tasks executed on this queue through `scoped_with`, `sync_with`,
/// `foreach_with` or `loop_while_with`.
///
/// `async_with` cannot be provided, as the bound variable is
/// dropped when the BoundSerialQueue gets dropped.
///
/// Internally, BoundSerialQueues refer to the same queue they were created from.
/// Multiple BoundSerialQueues may exist for one queue at once.
pub struct BoundSerialQueue<'queue, T:'static>
{
queue: &'queue SerialQueue,
binding: NotThreadSafe<T>,
}
impl<'queue, T:'static> fmt::Debug for BoundSerialQueue<'queue, T>
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "BoundSerialQueue ({:p})", self)
}
}
impl<'queue, T:'static> BoundSerialQueue<'queue, T>
{
/// Like `Queue::scoped` but provides a mutable reference to the bound variable
///
/// # Safety
///
/// The same rules as for `Queue::scoped` apply.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "Hello".to_string() });
/// let name = "Stack".to_string();
/// bound.scoped_with(|message| { println!("{} {}!", message, name) });
/// ```
pub fn scoped_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where R: Send +'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.scoped(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::sync` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "a bound queue".to_string() });
/// bound.sync_with(|name| { println!("Hello {}", name) });
/// ```
pub fn sync_with<R, F>(&'queue self, operation: F) -> R
where R: Send +'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.sync(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::foreach` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| 2);
/// let doubled: Vec<i32> = bound.foreach_with((0..20), |factor, x| x*factor).wait().collect();
/// # assert_eq!((0..20).map(|x| x*2).collect::<Vec<i32>>(), doubled);
/// ```
pub fn foreach_with<B, R, I, F>(&'queue self, mut iter: I, operation: F) -> Group<R>
where B: Send,
R: Send + 'queue,
I: Iterator<Item = B> + Send,
F: Fn(&'queue T, B) -> R + Send + Sync + 'queue
{
let mut group = Group::new();
loop {
match iter.next() {
Some(x) => {
let op = &operation;
let binding = &self.binding;
group.scoped(self, move || op(unsafe { binding.get_mut() }, x))
},
None => break,
}
}
group
}
/// Like `Queue::loop_while` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// use std::mem;
///
/// let queue = SerialQueue::new();
/// let bound = queue.with(|| (15, 25));
///
/// let greatest_common_divisor = bound.loop_while_with(|tuple| {
/// let x = tuple.0;
/// let y = tuple.1;
///
/// if y == 0 {
/// LoopResult::Done(x.clone())
/// } else {
/// let remainder = x % y;
/// let mut new_tuple = (y, remainder);
/// mem::swap(tuple, &mut new_tuple);
/// LoopResult::Continue
/// }
/// }).get();
/// #
/// # assert_eq!(5, greatest_common_divisor);
/// ```
pub fn loop_while_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where F: Fn(&'queue mut T) -> LoopResult<R> + Send + Sync + 'queue,
R: Send +'static,
{
self.queue.loop_while(move || operation(unsafe { self.binding.get_mut() }))
}
}
impl<'a, T:'static> Queue for BoundSerialQueue<'a, T>
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send +'static,
F: FnOnce() -> R + Send +'static
{
self.queue.async(operation)
}
}
impl<'queue, T:'static> Drop for BoundSerialQueue<'queue, T>
{
fn drop(&mut self)
{
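// Drop the bound value on the queue's own thread: `T` may not be `Send`,
// so it has to be destroyed where it was created.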
let binding = self.binding.clone();
self.queue.async(move || {
unsafe {
binding.drop();
}
});
}
}
impl QueueId for SerialQueue
{
fn id(&self) -> usize
{
self.id
}
}
/// Convert the main thread into a SerialQueue
///
/// This function wraps the current thread into a SerialQueue that
/// is passed to the executed function, blocking the current thread
/// until the created Queue is done.
///
/// This function is only intended to be used on the main thread and
/// library creators should never need to use it.
///
/// If you need a queue based on a newly created OS thread use `SerialQueue::new_native()`.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// fn main() {
/// init_main(|main_queue| {
/// // start using it!
/// })
/// }
/// ```
pub fn init_main<F: FnOnce(SerialQueue) + Send +'static>(start: F)
{
blocking_mioco_run_loop(move || {
mioco::set_children_userdata(Some(Userdata::RoundRobin));
let (tx, rx) = mailbox::<Command>();
let (deadlock_tx, deadlock_rx) = mailbox::<()>();
let new_queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx.clone(),
deadlock_tx: deadlock_tx.clone(),
};
tx.send(Command::Run(
Stack::assemble_main(new_queue, start)
));
info!("Main Queue ready!");
SerialQueue::do_loop(tx, rx, deadlock_rx);
Ok(())
})
}
impl Drop for SerialQueue
{
fn drop(&mut self)
{
trace!("Dropping {:?}", self);
self.tx.send(Command::End);
}
}
serial.rs | // Copyright 2016 taskqueue developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::boxed::FnBox;
use std::fmt;
use std::thread;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use mioco::{self, ExitStatus, Config, Mioco};
use mioco::mail::*;
use future::{Future, FutureInternal, FutureGuard};
use group::Group;
use queue::{Queue, QueueId, LoopResult};
use util::mioco_handler::{Userdata, blocking_mioco_run_loop, new_coroutine};
use util::unsafe_wrap::NotThreadSafe;
use util::stack::Stack;
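// Monotonic source of queue ids: `fetch_add` hands every queue a unique
// id, which also backs the `PartialEq` impl below.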
static ID: AtomicUsize = ATOMIC_USIZE_INIT;
enum Command
{
Run(Box<FnBox() + Send + 'static>),
Wait(MailboxInnerEnd<ExitStatus>),
End,
}
/// Queue executing Tasks serially, non-overlapping in queued Order
///
/// ## Properties
/// - executes tasks in serial order
/// - no tasks may overlap
/// - they never change their native background thread (but may share it)
/// - the tasks are executed in order of queuing
/// - safety against deadlocks from recursive queueing (see example)
///
/// Through these guarantees SerialQueues may be bound to a type that is **not** Send or Sync
/// and provide easy thread-safe access to this critical resource.
/// Such a SerialQueue is called [*BoundSerialQueue*](./struct.BoundSerialQueue.html).
///
/// ## Example
///
/// ```rust
/// # use taskqueue::*;
/// init_main(|main| {
/// let thread_one = SerialQueue::new();
/// let thread_two = SerialQueue::new();
///
/// let future_one = thread_one.async(|| {
/// 42
/// });
/// let future_two = thread_two.async(|| {
/// 96
/// });
///
/// println!("Although this is happening in main,");
/// main.async(|| {
/// println!("this task is running before...");
/// });
/// main.sync(|| {
/// println!("...this task and...");
/// assert_eq!(future_one.get() + future_two.get(), 138);
/// });
/// println!("...this is running last");
/// });
/// ```
pub struct SerialQueue
{
id: usize,
tx: MailboxOuterEnd<Command>,
deadlock_tx: MailboxOuterEnd<()>,
}
impl PartialEq for SerialQueue
{
fn eq(&self, other: &Self) -> bool
{
self.id == other.id
}
}
impl fmt::Debug for SerialQueue
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "SerialQueue ({})", self.id)
}
}
unsafe impl Send for SerialQueue
{}
unsafe impl Sync for SerialQueue
{}
impl SerialQueue
{
/// Create a new SerialQueue and assign it to the global thread pool
pub fn new() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
new_coroutine(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Queue created ({:?})", queue);
queue
}
/// Create a new SerialQueue and assign it solely to a newly created OS Thread
///
/// A SerialQueue created through this method will spawn a new native OS Thread
/// and the queue will be the only one utilizing it. The thread will be destroyed
/// when the queue is dropped.
///
/// The purpose of this constructor is to provide a way to use blocking IO with TaskQueue.
/// The use of this method however is discouraged, as the new thread may influence
/// the scheduler negatively, and evented IO, where possible, performs a lot better
/// in combination with the TaskQueue library.
pub fn new_native() -> SerialQueue
{
let (tx, rx) = mailbox();
let (deadlock_tx, deadlock_rx) = mailbox();
let internal_tx = tx.clone();
thread::spawn(move || {
Mioco::new_configured({
let mut config = Config::new();
config.set_thread_num(1);
config
}).start(move || {
SerialQueue::do_loop(internal_tx, rx, deadlock_rx);
Ok(())
});
});
let queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx,
deadlock_tx: deadlock_tx,
};
info!("Native Queue created ({:?})", queue);
queue
}
fn do_loop(queue_tx: MailboxOuterEnd<Command>,
rx: MailboxInnerEnd<Command>,
deadlock_rx: MailboxInnerEnd<()>)
{
debug!("loop: spawing serial loop");
loop {
trace!("loop: next iteration");
match rx.read() {
Command::End => break,
Command::Wait(routine) => {
trace!("loop: handling previous deadlocked coroutine");
let tx_clone = queue_tx.clone();
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
Command::Run(task) => {
let tx_clone = queue_tx.clone();
mioco::set_children_userdata(Some(Userdata::SameThread));
let routine = mioco::spawn_ext(move || {
trace!("loop: spawned new coroutine for task");
task.call_box(());
Ok(())
})
.exit_notificator();
trace!("loop: wait for deadlock notification or coroutine finish");
loop {
select!(
routine:r => {
if routine.try_read().is_some() {
trace!("loop: task ended");
break;
} else {
continue;
}
},
deadlock_rx:r => {
if deadlock_rx.try_read().is_some() {
trace!("loop: deadlock detected");
tx_clone.send(Command::Wait(routine));
break;
} else {
continue;
}
},
);
}
}
}
}
debug!("loop: queue ended");
}
/// Bind this queue to a variable
///
/// This function allows you to create a `BoundSerialQueue`.
/// Its purpose is to bind variables to a queue, so they can be used by the tasks submitted.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let x = 5;
/// let bound = queue.with(move || x);
/// bound.scoped_with(|x| println!("{}", x));
/// // x gets dropped inside the queues thread before the queue gets dropped
/// ```
///
/// You can create multiple bindings for the same queue.
/// Through tuples you may bind multiple variables in a single binding.
///
/// It is even possible to move the creation of the bound variable into the queue by creating
/// it inside the passed constructor, which is then executed on the queue.
/// And because SerialQueues never change their underlying OS Thread,
/// this allows to use variables that are not Send and Sync in a thread-safe but shared way.
///
/// # Example
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// # fn my_ffi_function() -> *mut () { &mut () };
/// let bound = queue.with(|| {
/// let raw_ptr = my_ffi_function();
/// raw_ptr
/// });
/// bound.scoped_with(|raw_ptr| println!("{}", raw_ptr.is_null()));
/// // raw_ptr gets dropped inside the queues thread.
/// // This way raw_ptr is never moved between threads.
/// ```
pub fn with<'queue, R:'static, F>(&'queue self,
constructor: F)
-> BoundSerialQueue<'queue, R>
where F: FnOnce() -> R + Send
{
let binding = self.sync(move || NotThreadSafe::new(constructor()));
BoundSerialQueue {
queue: self,
binding: binding,
}
}
}
impl Queue for SerialQueue
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send +'static,
F: FnOnce() -> R + Send +'static
{
let (tx, rx) = mailbox();
let operation: Box<FnBox() + Send +'static> = Stack::assemble(self, move || {
tx.send(operation());
});
debug!("Queue ({:?}) queued task", self);
self.tx.send(Command::Run(operation));
Future::new_from_serial(self, Some(self.deadlock_tx.clone()), rx)
}
}
/// A bound SerialQueue holding a queue-bound variable
///
/// Create a BoundSerialQueue using `SerialQueue::with`.
/// BoundSerialQueues hold variables that may be used by
/// tasks executed on this queue through `scoped_with`, `sync_with`,
/// `foreach_with` or `loop_while_with`.
///
/// `async_with` cannot be provided, as the bound variable is
/// dropped when the BoundSerialQueue gets dropped.
///
/// Internally, BoundSerialQueues refer to the same queue they were created from.
/// Multiple BoundSerialQueues may exist for one queue at once.
pub struct BoundSerialQueue<'queue, T: 'static>
{
queue: &'queue SerialQueue,
binding: NotThreadSafe<T>,
}
impl<'queue, T: 'static> fmt::Debug for BoundSerialQueue<'queue, T>
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "BoundSerialQueue ({:p})", self)
}
}
impl<'queue, T: 'static> BoundSerialQueue<'queue, T>
{
/// Like `Queue::scoped` but provides a mutable reference to the bound variable
///
/// # Safety
///
/// The same rules as for `Queue::scoped` apply.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "Hello".to_string() });
/// let name = "Stack".to_string();
/// bound.scoped_with(|message| { println!("{} {}!", message, name) });
/// ```
pub fn scoped_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where R: Send + 'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.scoped(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::sync` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| { "a bound queue".to_string() });
/// bound.sync_with(|name| { println!("Hello {}", name) });
/// ```
pub fn sync_with<R, F>(&'queue self, operation: F) -> R
where R: Send + 'static,
F: FnOnce(&'queue mut T) -> R + Send + 'queue
{
self.queue.sync(move || operation(unsafe { self.binding.get_mut() }))
}
/// Like `Queue::foreach` but provides a shared reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// # let queue = SerialQueue::new();
/// let bound = queue.with(|| 2);
/// let doubled: Vec<i32> = bound.foreach_with((0..20), |factor, x| x*factor).wait().collect();
/// # assert_eq!((0..20).map(|x| x*2).collect::<Vec<i32>>(), doubled);
/// ```
pub fn foreach_with<B, R, I, F>(&'queue self, mut iter: I, operation: F) -> Group<R>
where B: Send,
R: Send + 'queue,
I: Iterator<Item = B> + Send,
F: Fn(&'queue T, B) -> R + Send + Sync + 'queue
{
let mut group = Group::new();
loop {
match iter.next() {
Some(x) => {
let op = &operation;
let binding = &self.binding;
group.scoped(self, move || op(unsafe { binding.get_mut() }, x))
},
None => break,
}
}
group
}
/// Like `Queue::loop_while` but provides a mutable reference to the bound variable
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// use std::mem;
///
/// let queue = SerialQueue::new();
/// let bound = queue.with(|| (15, 25));
///
/// let greatest_common_divisor = bound.loop_while_with(|tuple| {
/// let x = tuple.0;
/// let y = tuple.1;
///
/// if y == 0 {
/// LoopResult::Done(x.clone())
/// } else {
/// let remainder = x % y;
/// let mut new_tuple = (y, remainder);
/// mem::swap(tuple, &mut new_tuple);
/// LoopResult::Continue
/// }
/// }).get();
/// #
/// # assert_eq!(5, greatest_common_divisor);
/// ```
pub fn loop_while_with<R, F>(&'queue self, operation: F) -> FutureGuard<R>
where F: Fn(&'queue mut T) -> LoopResult<R> + Send + Sync + 'queue,
R: Send + 'static,
{
self.queue.loop_while(move || operation(unsafe { self.binding.get_mut() }))
}
}
impl<'a, T: 'static> Queue for BoundSerialQueue<'a, T>
{
fn async<R, F>(&self, operation: F) -> Future<R>
where R: Send + 'static,
F: FnOnce() -> R + Send + 'static
{
self.queue.async(operation)
}
}
impl<'queue, T: 'static> Drop for BoundSerialQueue<'queue, T>
{
fn drop(&mut self)
{
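// Drop the bound value on the queue's own thread rather than here: the
// value may be neither Send nor Sync, so it must not be destroyed from
// whatever thread happens to drop this handle. `binding.clone()` is
// assumed to clone only the NotThreadSafe pointer, not the value itself.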
let binding = self.binding.clone();
self.queue.async(move || {
unsafe {
binding.drop();
}
});
}
}
impl QueueId for SerialQueue
{
fn id(&self) -> usize
{
self.id
}
}
/// Convert the main thread into a SerialQueue
///
/// This function wraps the current thread into a SerialQueue, which
/// is passed to the executed function, blocking the current thread
/// until the created queue is done.
///
/// This function is only intended to be used on the main thread and
/// library creators should never need to use it.
///
/// If you need a queue based on a newly created OS thread use `SerialQueue::new_native()`.
///
/// # Example
///
/// ```
/// # use taskqueue::*;
/// fn main() {
/// init_main(|main_queue| {
/// // start using it!
/// })
/// }
/// ```
pub fn init_main<F: FnOnce(SerialQueue) + Send + 'static>(start: F)
{
blocking_mioco_run_loop(move || {
mioco::set_children_userdata(Some(Userdata::RoundRobin));
let (tx, rx) = mailbox::<Command>();
let (deadlock_tx, deadlock_rx) = mailbox::<()>();
let new_queue = SerialQueue {
id: ID.fetch_add(1, Ordering::SeqCst),
tx: tx.clone(),
deadlock_tx: deadlock_tx.clone(),
};
tx.send(Command::Run(
Stack::assemble_main(new_queue, start)
));
info!("Main Queue ready!");
SerialQueue::do_loop(tx, rx, deadlock_rx);
Ok(())
})
}
impl Drop for SerialQueue
{
fn drop(&mut self)
{
trace!("Dropping {:?}", self);
self.tx.send(Command::End);
}
} |
impl fmt::Debug for SerialQueue | random_line_split |
main0.rs | use std::error::Error;
use mio::net::{ TcpListener, TcpStream, UdpSocket};
use mio::{Events, Interest, Poll, Token};
use std::io::{Read, Write};
use hex;
use rand::{thread_rng, Rng};
use keccak_hash::keccak;
use secp256k1::{SecretKey, PublicKey, Message, RecoveryId, Signature, sign, recover};
use rlp::{Rlp, RlpStream};
use std::collections::VecDeque;
mod message;
use message::PeerInfo;
fn print_message_type(message_type: u8) {
match message_type {
0x01 => {
println!("ping message");
},
0x02 => {
println!("pong message");
},
0x03 => {
println!("find neighbours message");
},
0x04 => {
println!("neighbours message");
},
_ => {
println!("unknow message");
},
}
}
fn main() -> Result<(), Box<dyn Error>> {
let mut arr = [0u8; 32];
thread_rng().fill(&mut arr[..]);
// let data = vec![0x83, b'c', b'a', b't'];
// let aa: String = rlp::decode(&data).unwrap();
// println!("aa = {:?}", aa);
// let pk = hex::decode("ee5495585eff78f2fcf95bab21ef1a598c54d1e3c672e23b3bb97a4fc7490660").unwrap();
let private_key = SecretKey::parse_slice(&arr[0..arr.len()]).unwrap();
// let private_key = SecretKey::parse_slice(&pk).unwrap();
let pubkey = PublicKey::from_secret_key(&private_key);
let id = &pubkey.serialize().to_vec()[1..];
println!("id is {:?}", hex::encode(&id));
const CLIENT: Token = Token(0);
const SENDER: Token = Token(1);
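// The two tokens must differ, otherwise the match below could never
// reach the CLIENT arm and TCP events would be handled as UDP ones.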
let udp_server_ip = "35.180.217.147";
let udp_server_port = "30304";
let udp_server_addr = "35.180.217.147:30304";
let local_udp_addr = "192.168.31.125:30309";
let mut udp_socket = UdpSocket::bind(local_udp_addr.parse()?)?;
// let local_addr = udp_socket.local_addr()?;
//
// println!("local_addr = {:?}", local_addr);
println!("private_key is {:?}", private_key);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(1024);
let addr = "192.168.31.248:30303".parse()?;
let peer = PeerInfo::from_sock_addr(&udp_server_addr.parse()?);
let local_peer = PeerInfo::from_sock_addr(&local_udp_addr.parse()?);
let mut sent_ping = false;
// message::encode_ping(&peer, &peer, &private_key);
// println!("peer ip {:?}", peer.encode());
// let addr = "127.0.0.1:9000".parse()?;
let mut send_queue: VecDeque<Vec<u8>> = VecDeque::new();
let mut client = TcpStream::connect(addr)?;
let mut status_sent = false;
poll.registry().register(&mut client, CLIENT, Interest::READABLE | Interest::WRITABLE)?;
poll.registry().register(&mut udp_socket, SENDER, Interest::READABLE | Interest::WRITABLE)?;
let mut received_data = Vec::with_capacity(4096);
send_queue.push_back(message::encode_ping(&local_peer, &peer, &private_key));
loop {
poll.poll(&mut events, None)?;
for event in events.iter() {
match event.token() {
SENDER => {
println!("udp socket is active");
if event.is_writable() {
'inner: loop {
if let Some(buf) = send_queue.pop_front() {
match udp_socket.send_to(&buf, udp_server_addr.parse()?) {
Ok(size) => {
println!("sent {:?} bytes(total {:?})", size, buf.len());
// we have some buf remain for next time
if size < buf.len() {
if size == 0 {
send_queue.push_front(buf);
}
break 'inner;
}
},
Err(e) => {
println!("send error {:?}", e);
break 'inner;
}
}
} else {
println!("no data to send, reregister for next writable event");
break 'inner;
}
}
}
if event.is_readable() {
'read: loop {
let mut buf = [0; 1024];
match udp_socket.recv_from(&mut buf) {
Ok((size, addr)) => {
println!("read {:?} bytes from {:?}", size, addr);
if size > 0 {
let read_buf = &buf[..size];
let hash_signed = keccak(&read_buf[32..]);
println!("hash_signed = {:?}", hash_signed);
println!("check_sum = {:?}", hex::encode(&read_buf[0..32]));
// if hash_signed.as_bytes() != &read_buf[0..32] {
// // return Box::new(Err("bad protocol"));
// break;
// }
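// discv4 packet layout, as parsed below:
// bytes 0..32 keccak256 checksum, bytes 32..96 signature, byte 96 the
// recovery id, byte 97 the packet type, and the rest the RLP payload.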
let signed = &read_buf[(32 + 65)..];
let message_type = signed[0];
print_message_type(message_type);
println!("message_type is {:?}", message_type);
let recover_id = RecoveryId::parse(read_buf[32 + 64]).expect("can not get recover id");
println!("recover_id = {:?}", recover_id);
let signature = Signature::parse_slice(&read_buf[32..(32 + 64)]).expect("can not get signature");
let hash = keccak(signed);
let pubkey = recover(&Message::parse_slice(&hash).unwrap(), &signature, &recover_id).expect("can not recover pubkey");
println!("pubkey is {:?}", hex::encode(&pubkey.serialize_compressed().to_vec()));
let rlp = Rlp::new(&signed[1..]);
if message_type == 0x01 {
// got a ping message
let version: u8 = rlp.val_at(0)?;
let from_peer = PeerInfo::decode_rlp(&rlp.at(1)?)?;
let to_peer = PeerInfo::decode_rlp(&rlp.at(2)?)?;
println!("from_peer = {:?}, to_peer = {:?}", from_peer, to_peer);
let timestamp: u64 = rlp.val_at(3)?;
println!("version = {:?}, timestamp = {:?}", version, timestamp);
// send pong message
let from = PeerInfo::from_sock_addr(&addr);
let bytes = message::encode_pong(&from, &read_buf[0..32].to_vec(), ×tamp, &private_key); |
} else if message_type == 0x02 {
// got a pong message
let from_peer = PeerInfo::decode_rlp(&rlp.at(0)?)?;
let hash_bytes = rlp.at(1)?.data()?;
let timestamp: u64 = rlp.val_at(2)?;
println!("got a pong message {:?} {:?}", from_peer, timestamp);
// start send findneighbours packet
let bytes = message::encode_find_node(&private_key);
println!("find node bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
} else if message_type == 0x03 {
println!("got a find node message");
} else if message_type == 0x04 {
println!("got a node message");
}
poll.registry().reregister(&mut udp_socket, event.token(), Interest::WRITABLE)?;
// we have read all data
if size < buf.len() {
println!("no more data read");
break 'read;
}
} else {
println!("no data read");
break 'read;
}
},
Err(e) => {
println!("read error {:?}", e);
break 'read;
}
}
}
}
},
CLIENT => {
if event.is_readable() {
println!("client socket is readable");
// read buf
let mut buf = [0; 1024];
match client.read(&mut buf) {
Ok(n) => {
if n > 0 {
received_data.extend_from_slice(&buf[..n]);
println!("read data: {:?}", String::from_utf8_lossy(&received_data));
}
println!("read {:?} bytes", n);
},
Err(err) => {
println!("read data error {:?}", err);
}
}
}
if event.is_writable() {
// send auth info
let auth = "0196045fa704aa5f5a85f36c6b399b08d823083228d63c4346f382f78a18b684f3a4e64a671de498abf20cba88dd8f3f0a11443bed18248895b981e0c842e9e4fafe387cf9ad619ba89fe7dbfa6f504725bb673a804f3526df31c68a69caf9bc7a9eed62fe73dffdeae5e21f55e2a1ec28e17ad5f98bd0a61759fe25f8f96665278197413d86ab84ea2f3adbf70634b49d13b4b55037e23f393ddc2ae46e63d4c3d1b67945bcf22d03183a1b1ff3b9b74cf3d83a8093489b508759c5042ca0d7de29aa6eb024800868594f848f646f1488c7bbf2a598d411a7333db52168f53e04e28b260e218233e9641232304625ba67cbaa7b6a3703161235ab41758d466701beac1a08e5edc612e42cb7235d43cbdd51ff7bb3cbe4720dfa165f084dafce2c84795eb619016647c9aef4d6d9b31e1a4b1e3b18e856a025ab99275b8b860816259ddf86cdc20c22e0f6f70445258113fade6d38814cb88d8c0693a64880088563cb02ff15236bca24720aaaa9da219c0f2fa71f8a4b1e34793a330b31ccfbdcbaf0026c881d5761b198be428feb93b170afe95174722f";
let buf = hex::decode(auth).unwrap();
if !status_sent {
status_sent = true;
match client.write_all(&buf) {
Ok(_) => {
println!("write ok");
},
Err(err) => {
println!("write data error {:?}", err);
}
}
}
println!("client socket is writable");
}
println!("client event token {:?}", event.token());
},
_ => {
}
}
}
}
println!("Hello, world!");
} | println!("pong bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
// send_queue | random_line_split |
main0.rs | use std::error::Error;
use mio::net::{ TcpListener, TcpStream, UdpSocket};
use mio::{Events, Interest, Poll, Token};
use std::io::{Read, Write};
use hex;
use rand::{thread_rng, Rng};
use keccak_hash::keccak;
use secp256k1::{SecretKey, PublicKey, Message, RecoveryId, Signature, sign, recover};
use rlp::{Rlp, RlpStream};
use std::collections::VecDeque;
mod message;
use message::PeerInfo;
fn print_message_type(message_type: u8) {
match message_type {
0x01 => {
println!("ping message");
},
0x02 => {
println!("pong message");
},
0x03 => {
println!("find neighbours message");
},
0x04 => {
println!("neighbours message");
},
_ => {
println!("unknow message");
},
}
}
fn main() -> Result<(), Box<dyn Error>> {
let mut arr = [0u8; 32];
thread_rng().fill(&mut arr[..]);
// let data = vec![0x83, b'c', b'a', b't'];
// let aa: String = rlp::decode(&data).unwrap();
// println!("aa = {:?}", aa);
// let pk = hex::decode("ee5495585eff78f2fcf95bab21ef1a598c54d1e3c672e23b3bb97a4fc7490660").unwrap();
let private_key = SecretKey::parse_slice(&arr[0..arr.len()]).unwrap();
// let private_key = SecretKey::parse_slice(&pk).unwrap();
let pubkey = PublicKey::from_secret_key(&private_key);
let id = &pubkey.serialize().to_vec()[1..];
println!("id is {:?}", hex::encode(&id));
const CLIENT: Token = Token(0);
const SENDER: Token = Token(1);
let udp_server_ip = "35.180.217.147";
let udp_server_port = "30304";
let udp_server_addr = "35.180.217.147:30304";
let local_udp_addr = "192.168.31.125:30309";
let mut udp_socket = UdpSocket::bind(local_udp_addr.parse()?)?;
// let local_addr = udp_socket.local_addr()?;
//
// println!("local_addr = {:?}", local_addr);
println!("private_key is {:?}", private_key);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(1024);
let addr = "192.168.31.248:30303".parse()?;
let peer = PeerInfo::from_sock_addr(&udp_server_addr.parse()?);
let local_peer = PeerInfo::from_sock_addr(&local_udp_addr.parse()?);
let mut sent_ping = false;
// message::encode_ping(&peer, &peer, &private_key);
// println!("peer ip {:?}", peer.encode());
// let addr = "127.0.0.1:9000".parse()?;
let mut send_queue: VecDeque<Vec<u8>> = VecDeque::new();
let mut client = TcpStream::connect(addr)?;
let mut status_sent = false;
poll.registry().register(&mut client, CLIENT, Interest::READABLE | Interest::WRITABLE)?;
poll.registry().register(&mut udp_socket, SENDER, Interest::READABLE | Interest::WRITABLE)?;
let mut received_data = Vec::with_capacity(4096);
send_queue.push_back(message::encode_ping(&local_peer, &peer, &private_key));
loop {
poll.poll(&mut events, None)?;
for event in events.iter() {
match event.token() {
SENDER => {
println!("udp socket is active");
if event.is_writable() {
'inner: loop {
if let Some(buf) = send_queue.pop_front() {
match udp_socket.send_to(&buf, udp_server_addr.parse()?) {
Ok(size) => {
println!("sent {:?} bytes(total {:?})", size, buf.len());
// we have some buf remain for next time
if size < buf.len() {
if size == 0 {
send_queue.push_front(buf);
}
break 'inner;
}
},
Err(e) => {
println!("send error {:?}", e);
break 'inner;
}
}
} else {
println!("no data to send, reregister for next writable event");
break 'inner;
}
}
}
if event.is_readable() {
'read: loop {
let mut buf = [0; 1024];
match udp_socket.recv_from(&mut buf) {
Ok((size, addr)) => {
println!("read {:?} bytes from {:?}", size, addr);
if size > 0 {
let read_buf = &buf[..size];
let hash_signed = keccak(&read_buf[32..]);
println!("hash_signed = {:?}", hash_signed);
println!("check_sum = {:?}", hex::encode(&read_buf[0..32]));
// if hash_signed.as_bytes() != &read_buf[0..32] {
// // return Box::new(Err("bad protocol"));
// break;
// }
let signed = &read_buf[(32 + 65)..];
let message_type = signed[0];
print_message_type(message_type);
println!("message_type is {:?}", message_type);
let recover_id = RecoveryId::parse(read_buf[32 + 64]).expect("can not get recover id");
println!("recover_id = {:?}", recover_id);
let signature = Signature::parse_slice(&read_buf[32..(32 + 64)]).expect("can not get signature");
let hash = keccak(signed);
let pubkey = recover(&Message::parse_slice(&hash).unwrap(), &signature, &recover_id).expect("can not recover pubkey");
println!("pubkey is {:?}", hex::encode(&pubkey.serialize_compressed().to_vec()));
let rlp = Rlp::new(&signed[1..]);
if message_type == 0x01 {
// got a ping message
let version: u8 = rlp.val_at(0)?;
let from_peer = PeerInfo::decode_rlp(&rlp.at(1)?)?;
let to_peer = PeerInfo::decode_rlp(&rlp.at(2)?)?;
println!("from_peer = {:?}, to_peer = {:?}", from_peer, to_peer);
let timestamp: u64 = rlp.val_at(3)?;
println!("version = {:?}, timestamp = {:?}", version, timestamp);
// send pong message
let from = PeerInfo::from_sock_addr(&addr);
let bytes = message::encode_pong(&from, &read_buf[0..32].to_vec(), ×tamp, &private_key);
println!("pong bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
// send_queue
} else if message_type == 0x02 {
// got a pong message
let from_peer = PeerInfo::decode_rlp(&rlp.at(0)?)?;
let hash_bytes = rlp.at(1)?.data()?;
let timestamp: u64 = rlp.val_at(2)?;
println!("got a pong message {:?} {:?}", from_peer, timestamp);
// start send findneighbours packet
let bytes = message::encode_find_node(&private_key);
println!("find node bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
} else if message_type == 0x03 {
println!("got a find node message");
} else if message_type == 0x04 {
println!("got a node message");
}
poll.registry().reregister(&mut udp_socket, event.token(), Interest::WRITABLE)?;
// we have read all data
if size < buf.len() {
println!("no more data read");
break 'read;
}
} else {
println!("no data read");
break 'read;
}
},
Err(e) => {
println!("read error {:?}", e);
break 'read;
}
}
}
}
},
CLIENT => {
if event.is_readable() {
println!("client socket is readable");
// read buf
let mut buf = [0; 1024];
match client.read(&mut buf) {
Ok(n) => {
if n > 0 {
received_data.extend_from_slice(&buf[..n]);
println!("read data: {:?}", String::from_utf8_lossy(&received_data));
}
println!("read {:?} bytes", n);
},
Err(err) => {
println!("read data error {:?}", err);
}
}
}
if event.is_writable() {
// send auth info
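// NOTE: the auth payload below is a hardcoded, pre-computed RLPx
// handshake blob (captured for one specific key pair); a real peer will
// presumably reject it unless it expects exactly this handshake.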
let auth = "0196045fa704aa5f5a85f36c6b399b08d823083228d63c4346f382f78a18b684f3a4e64a671de498abf20cba88dd8f3f0a11443bed18248895b981e0c842e9e4fafe387cf9ad619ba89fe7dbfa6f504725bb673a804f3526df31c68a69caf9bc7a9eed62fe73dffdeae5e21f55e2a1ec28e17ad5f98bd0a61759fe25f8f96665278197413d86ab84ea2f3adbf70634b49d13b4b55037e23f393ddc2ae46e63d4c3d1b67945bcf22d03183a1b1ff3b9b74cf3d83a8093489b508759c5042ca0d7de29aa6eb024800868594f848f646f1488c7bbf2a598d411a7333db52168f53e04e28b260e218233e9641232304625ba67cbaa7b6a3703161235ab41758d466701beac1a08e5edc612e42cb7235d43cbdd51ff7bb3cbe4720dfa165f084dafce2c84795eb619016647c9aef4d6d9b31e1a4b1e3b18e856a025ab99275b8b860816259ddf86cdc20c22e0f6f70445258113fade6d38814cb88d8c0693a64880088563cb02ff15236bca24720aaaa9da219c0f2fa71f8a4b1e34793a330b31ccfbdcbaf0026c881d5761b198be428feb93b170afe95174722f";
let buf = hex::decode(auth).unwrap();
if !status_sent {
status_sent = true;
match client.write_all(&buf) {
Ok(_) => | ,
Err(err) => {
println!("read data error {:?}", err);
}
}
}
println!("client socket is writable");
}
println!("client event token {:?}", event.token());
},
_ => {
}
}
}
}
println!("Hello, world!");
}
| {
println!("write ok");
} | conditional_block |
main0.rs | use std::error::Error;
use mio::net::{ TcpListener, TcpStream, UdpSocket};
use mio::{Events, Interest, Poll, Token};
use std::io::{Read, Write};
use hex;
use rand::{thread_rng, Rng};
use keccak_hash::keccak;
use secp256k1::{SecretKey, PublicKey, Message, RecoveryId, Signature, sign, recover};
use rlp::{Rlp, RlpStream};
use std::collections::VecDeque;
mod message;
use message::PeerInfo;
fn print_message_type(message_type: u8) | },
}
}
fn main() -> Result<(), Box<dyn Error>> {
let mut arr = [0u8; 32];
thread_rng().fill(&mut arr[..]);
// let data = vec![0x83, b'c', b'a', b't'];
// let aa: String = rlp::decode(&data).unwrap();
// println!("aa = {:?}", aa);
// let pk = hex::decode("ee5495585eff78f2fcf95bab21ef1a598c54d1e3c672e23b3bb97a4fc7490660").unwrap();
let private_key = SecretKey::parse_slice(&arr[0..arr.len()]).unwrap();
// let private_key = SecretKey::parse_slice(&pk).unwrap();
let pubkey = PublicKey::from_secret_key(&private_key);
let id = &pubkey.serialize().to_vec()[1..];
println!("id is {:?}", hex::encode(&id));
const CLIENT: Token = Token(0);
const SENDER: Token = Token(1);
let udp_server_ip = "35.180.217.147";
let udp_server_port = "30304";
let udp_server_addr = "35.180.217.147:30304";
let local_udp_addr = "192.168.31.125:30309";
let mut udp_socket = UdpSocket::bind(local_udp_addr.parse()?)?;
// let local_addr = udp_socket.local_addr()?;
//
// println!("local_addr = {:?}", local_addr);
println!("private_key is {:?}", private_key);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(1024);
let addr = "192.168.31.248:30303".parse()?;
let peer = PeerInfo::from_sock_addr(&udp_server_addr.parse()?);
let local_peer = PeerInfo::from_sock_addr(&local_udp_addr.parse()?);
let mut sent_ping = false;
// message::encode_ping(&peer, &peer, &private_key);
// println!("peer ip {:?}", peer.encode());
// let addr = "127.0.0.1:9000".parse()?;
let mut send_queue: VecDeque<Vec<u8>> = VecDeque::new();
let mut client = TcpStream::connect(addr)?;
let mut status_sent = false;
poll.registry().register(&mut client, CLIENT, Interest::READABLE | Interest::WRITABLE)?;
poll.registry().register(&mut udp_socket, SENDER, Interest::READABLE | Interest::WRITABLE)?;
let mut received_data = Vec::with_capacity(4096);
send_queue.push_back(message::encode_ping(&local_peer, &peer, &private_key));
loop {
poll.poll(&mut events, None)?;
for event in events.iter() {
match event.token() {
SENDER => {
println!("udp socket is active");
if event.is_writable() {
'inner: loop {
if let Some(buf) = send_queue.pop_front() {
match udp_socket.send_to(&buf, udp_server_addr.parse()?) {
Ok(size) => {
println!("sent {:?} bytes(total {:?})", size, buf.len());
// we have some buf remain for next time
if size < buf.len() {
if size == 0 {
send_queue.push_front(buf);
}
break 'inner;
}
},
Err(e) => {
println!("send error {:?}", e);
break 'inner;
}
}
} else {
println!("no data to send, reregister for next writable event");
break 'inner;
}
}
}
if event.is_readable() {
'read: loop {
let mut buf = [0; 1024];
match udp_socket.recv_from(&mut buf) {
Ok((size, addr)) => {
println!("read {:?} bytes from {:?}", size, addr);
if size > 0 {
let read_buf = &buf[..size];
let hash_signed = keccak(&read_buf[32..]);
println!("hash_signed = {:?}", hash_signed);
println!("check_sum = {:?}", hex::encode(&read_buf[0..32]));
// if hash_signed.as_bytes() != &read_buf[0..32] {
// // return Box::new(Err("bad protocol"));
// break;
// }
let signed = &read_buf[(32 + 65)..];
let message_type = signed[0];
print_message_type(message_type);
println!("message_type is {:?}", message_type);
let recover_id = RecoveryId::parse(read_buf[32 + 64]).expect("can not get recover id");
println!("recover_id = {:?}", recover_id);
let signature = Signature::parse_slice(&read_buf[32..(32 + 64)]).expect("can not get signature");
let hash = keccak(signed);
let pubkey = recover(&Message::parse_slice(&hash).unwrap(), &signature, &recover_id).expect("can not recover pubkey");
println!("pubkey is {:?}", hex::encode(&pubkey.serialize_compressed().to_vec()));
let rlp = Rlp::new(&signed[1..]);
if message_type == 0x01 {
// got a ping message
let version: u8 = rlp.val_at(0)?;
let from_peer = PeerInfo::decode_rlp(&rlp.at(1)?)?;
let to_peer = PeerInfo::decode_rlp(&rlp.at(2)?)?;
println!("from_peer = {:?}, to_peer = {:?}", from_peer, to_peer);
let timestamp: u64 = rlp.val_at(3)?;
println!("version = {:?}, timestamp = {:?}", version, timestamp);
// send pong message
let from = PeerInfo::from_sock_addr(&addr);
let bytes = message::encode_pong(&from, &read_buf[0..32].to_vec(), ×tamp, &private_key);
println!("pong bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
// send_queue
} else if message_type == 0x02 {
// got a pong message
let from_peer = PeerInfo::decode_rlp(&rlp.at(0)?)?;
let hash_bytes = rlp.at(1)?.data()?;
let timestamp: u64 = rlp.val_at(2)?;
println!("got a pong message {:?} {:?}", from_peer, timestamp);
// start send findneighbours packet
let bytes = message::encode_find_node(&private_key);
println!("find node bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
} else if message_type == 0x03 {
println!("got a find node message");
} else if message_type == 0x04 {
println!("got a node message");
}
poll.registry().reregister(&mut udp_socket, event.token(), Interest::WRITABLE)?;
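// NOTE: reregistering with WRITABLE only replaces the previous interest
// set, so this socket stops receiving readable events until it is
// reregistered for READABLE again.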
// we have read all data
if size < buf.len() {
println!("no more data read");
break 'read;
}
} else {
println!("no data read");
break 'read;
}
},
Err(e) => {
println!("read error {:?}", e);
break 'read;
}
}
}
}
},
CLIENT => {
if event.is_readable() {
println!("client socket is readable");
// read buf
let mut buf = [0; 1024];
match client.read(&mut buf) {
Ok(n) => {
if n > 0 {
received_data.extend_from_slice(&buf[..n]);
println!("read data: {:?}", String::from_utf8_lossy(&received_data));
}
println!("read {:?} bytes", n);
},
Err(err) => {
println!("read data error {:?}", err);
}
}
}
if event.is_writable() {
// send auth info
let auth = "0196045fa704aa5f5a85f36c6b399b08d823083228d63c4346f382f78a18b684f3a4e64a671de498abf20cba88dd8f3f0a11443bed18248895b981e0c842e9e4fafe387cf9ad619ba89fe7dbfa6f504725bb673a804f3526df31c68a69caf9bc7a9eed62fe73dffdeae5e21f55e2a1ec28e17ad5f98bd0a61759fe25f8f96665278197413d86ab84ea2f3adbf70634b49d13b4b55037e23f393ddc2ae46e63d4c3d1b67945bcf22d03183a1b1ff3b9b74cf3d83a8093489b508759c5042ca0d7de29aa6eb024800868594f848f646f1488c7bbf2a598d411a7333db52168f53e04e28b260e218233e9641232304625ba67cbaa7b6a3703161235ab41758d466701beac1a08e5edc612e42cb7235d43cbdd51ff7bb3cbe4720dfa165f084dafce2c84795eb619016647c9aef4d6d9b31e1a4b1e3b18e856a025ab99275b8b860816259ddf86cdc20c22e0f6f70445258113fade6d38814cb88d8c0693a64880088563cb02ff15236bca24720aaaa9da219c0f2fa71f8a4b1e34793a330b31ccfbdcbaf0026c881d5761b198be428feb93b170afe95174722f";
let buf = hex::decode(auth).unwrap();
if !status_sent {
status_sent = true;
match client.write_all(&buf) {
Ok(_) => {
println!("write ok");
},
Err(err) => {
println!("write data error {:?}", err);
}
}
}
println!("client socket is writable");
}
println!("client event token {:?}", event.token());
},
_ => {
}
}
}
}
println!("Hello, world!");
}
| {
match message_type {
0x01 => {
println!("ping message");
},
0x02 => {
println!("pong message");
},
0x03 => {
println!("find neighbours message");
},
0x04 => {
println!("neighbours message");
},
_ => {
println!("unknow message"); | identifier_body |
main0.rs | use std::error::Error;
use mio::net::{ TcpListener, TcpStream, UdpSocket};
use mio::{Events, Interest, Poll, Token};
use std::io::{Read, Write};
use hex;
use rand::{thread_rng, Rng};
use keccak_hash::keccak;
use secp256k1::{SecretKey, PublicKey, Message, RecoveryId, Signature, sign, recover};
use rlp::{Rlp, RlpStream};
use std::collections::VecDeque;
mod message;
use message::PeerInfo;
fn print_message_type(message_type: u8) {
match message_type {
0x01 => {
println!("ping message");
},
0x02 => {
println!("pong message");
},
0x03 => {
println!("find neighbours message");
},
0x04 => {
println!("neighbours message");
},
_ => {
println!("unknow message");
},
}
}
fn | () -> Result<(), Box<dyn Error>> {
let mut arr = [0u8; 32];
thread_rng().fill(&mut arr[..]);
// let data = vec![0x83, b'c', b'a', b't'];
// let aa: String = rlp::decode(&data).unwrap();
// println!("aa = {:?}", aa);
// let pk = hex::decode("ee5495585eff78f2fcf95bab21ef1a598c54d1e3c672e23b3bb97a4fc7490660").unwrap();
let private_key = SecretKey::parse_slice(&arr[0..arr.len()]).unwrap();
// let private_key = SecretKey::parse_slice(&pk).unwrap();
let pubkey = PublicKey::from_secret_key(&private_key);
let id = &pubkey.serialize().to_vec()[1..];
println!("id is {:?}", hex::encode(&id));
const CLIENT: Token = Token(0);
const SENDER: Token = Token(1);
let udp_server_ip = "35.180.217.147";
let udp_server_port = "30304";
let udp_server_addr = "35.180.217.147:30304";
let local_udp_addr = "192.168.31.125:30309";
let mut udp_socket = UdpSocket::bind(local_udp_addr.parse()?)?;
// let local_addr = udp_socket.local_addr()?;
//
// println!("local_addr = {:?}", local_addr);
println!("private_key is {:?}", private_key);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(1024);
let addr = "192.168.31.248:30303".parse()?;
let peer = PeerInfo::from_sock_addr(&udp_server_addr.parse()?);
let local_peer = PeerInfo::from_sock_addr(&local_udp_addr.parse()?);
let mut sent_ping = false;
// message::encode_ping(&peer, &peer, &private_key);
// println!("peer ip {:?}", peer.encode());
// let addr = "127.0.0.1:9000".parse()?;
let mut send_queue: VecDeque<Vec<u8>> = VecDeque::new();
let mut client = TcpStream::connect(addr)?;
let mut status_sent = false;
poll.registry().register(&mut client, CLIENT, Interest::READABLE | Interest::WRITABLE)?;
poll.registry().register(&mut udp_socket, SENDER, Interest::READABLE | Interest::WRITABLE)?;
let mut received_data = Vec::with_capacity(4096);
send_queue.push_back(message::encode_ping(&local_peer, &peer, &private_key));
loop {
poll.poll(&mut events, None)?;
for event in events.iter() {
match event.token() {
SENDER => {
println!("udp socket is active");
if event.is_writable() {
'inner: loop {
if let Some(buf) = send_queue.pop_front() {
match udp_socket.send_to(&buf, udp_server_addr.parse()?) {
Ok(size) => {
println!("sent {:?} bytes(total {:?})", size, buf.len());
// we have some buf remain for next time
if size < buf.len() {
if size == 0 {
send_queue.push_front(buf);
}
break 'inner;
}
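// For UDP, send_to either queues the whole datagram or fails, so the
// partial-send branch above is mostly defensive; only a 0-byte send
// puts the packet back at the front of the queue.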
},
Err(e) => {
println!("send error {:?}", e);
break 'inner;
}
}
} else {
println!("no data to send, reregister for next writable event");
break 'inner;
}
}
}
if event.is_readable() {
'read: loop {
let mut buf = [0; 1024];
match udp_socket.recv_from(&mut buf) {
Ok((size, addr)) => {
println!("read {:?} bytes from {:?}", size, addr);
if size > 0 {
let read_buf = &buf[..size];
let hash_signed = keccak(&read_buf[32..]);
println!("hash_signed = {:?}", hash_signed);
println!("check_sum = {:?}", hex::encode(&read_buf[0..32]));
// if hash_signed.as_bytes() != &read_buf[0..32] {
// // return Box::new(Err("bad protocol"));
// break;
// }
let signed = &read_buf[(32 + 65)..];
let message_type = signed[0];
print_message_type(message_type);
println!("message_type is {:?}", message_type);
let recover_id = RecoveryId::parse(read_buf[32 + 64]).expect("can not get recover id");
println!("recover_id = {:?}", recover_id);
let signature = Signature::parse_slice(&read_buf[32..(32 + 64)]).expect("can not get signature");
let hash = keccak(signed);
let pubkey = recover(&Message::parse_slice(&hash).unwrap(), &signature, &recover_id).expect("can not recover pubkey");
println!("pubkey is {:?}", hex::encode(&pubkey.serialize_compressed().to_vec()));
let rlp = Rlp::new(&signed[1..]);
if message_type == 0x01 {
// got a ping message
let version: u8 = rlp.val_at(0)?;
let from_peer = PeerInfo::decode_rlp(&rlp.at(1)?)?;
let to_peer = PeerInfo::decode_rlp(&rlp.at(2)?)?;
println!("from_peer = {:?}, to_peer = {:?}", from_peer, to_peer);
let timestamp: u64 = rlp.val_at(3)?;
println!("version = {:?}, timestamp = {:?}", version, timestamp);
// send pong message
let from = PeerInfo::from_sock_addr(&addr);
let bytes = message::encode_pong(&from, &read_buf[0..32].to_vec(), ×tamp, &private_key);
println!("pong bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
// send_queue
} else if message_type == 0x02 {
// got a pong message
let from_peer = PeerInfo::decode_rlp(&rlp.at(0)?)?;
let hash_bytes = rlp.at(1)?.data()?;
let timestamp: u64 = rlp.val_at(2)?;
println!("got a pong message {:?} {:?}", from_peer, timestamp);
// start send findneighbours packet
let bytes = message::encode_find_node(&private_key);
println!("find node bytes is {:?}", bytes.len());
send_queue.push_back(bytes);
} else if message_type == 0x03 {
println!("got a find node message");
} else if message_type == 0x04 {
println!("got a node message");
}
poll.registry().reregister(&mut udp_socket, event.token(), Interest::WRITABLE)?;
// we have read all data
if size < buf.len() {
println!("no more data read");
break 'read;
}
} else {
println!("no data read");
break 'read;
}
},
Err(e) => {
println!("read error {:?}", e);
break 'read;
}
}
}
}
},
CLIENT => {
if event.is_readable() {
println!("client socket is readable");
// read buf
let mut buf = [0; 1024];
match client.read(&mut buf) {
Ok(n) => {
if n > 0 {
received_data.extend_from_slice(&buf[..n]);
println!("read data: {:?}", String::from_utf8_lossy(&received_data));
}
println!("read {:?} bytes", n);
},
Err(err) => {
println!("read data error {:?}", err);
}
}
}
if event.is_writable() {
// send auth info
let auth = "0196045fa704aa5f5a85f36c6b399b08d823083228d63c4346f382f78a18b684f3a4e64a671de498abf20cba88dd8f3f0a11443bed18248895b981e0c842e9e4fafe387cf9ad619ba89fe7dbfa6f504725bb673a804f3526df31c68a69caf9bc7a9eed62fe73dffdeae5e21f55e2a1ec28e17ad5f98bd0a61759fe25f8f96665278197413d86ab84ea2f3adbf70634b49d13b4b55037e23f393ddc2ae46e63d4c3d1b67945bcf22d03183a1b1ff3b9b74cf3d83a8093489b508759c5042ca0d7de29aa6eb024800868594f848f646f1488c7bbf2a598d411a7333db52168f53e04e28b260e218233e9641232304625ba67cbaa7b6a3703161235ab41758d466701beac1a08e5edc612e42cb7235d43cbdd51ff7bb3cbe4720dfa165f084dafce2c84795eb619016647c9aef4d6d9b31e1a4b1e3b18e856a025ab99275b8b860816259ddf86cdc20c22e0f6f70445258113fade6d38814cb88d8c0693a64880088563cb02ff15236bca24720aaaa9da219c0f2fa71f8a4b1e34793a330b31ccfbdcbaf0026c881d5761b198be428feb93b170afe95174722f";
let buf = hex::decode(auth).unwrap();
if !status_sent {
status_sent = true;
match client.write_all(&buf) {
Ok(_) => {
println!("write ok");
},
Err(err) => {
println!("write data error {:?}", err);
}
}
}
println!("client socket is writable");
}
println!("client event token {:?}", event.token());
},
_ => {
}
}
}
}
println!("Hello, world!");
}
| main | identifier_name |
tx.rs | use std::convert::TryInto;
use std::io::Write;
use clap;
use bitcoin::hashes::Hash;
use bitcoin;
use elements::encode::{deserialize, serialize};
use elements::{
confidential, AssetIssuance, OutPoint, Transaction, TxIn, TxInWitness, TxOut, TxOutWitness,
Script,
};
use elements::secp256k1_zkp::{
Generator, PedersenCommitment, PublicKey, RangeProof, SurjectionProof, Tweak,
};
use cmd;
use hal_elements::Network;
use hal_elements::confidential::{
ConfidentialAssetInfo, ConfidentialNonceInfo, ConfidentialType, ConfidentialValueInfo,
};
use hal_elements::tx::{
AssetIssuanceInfo, InputInfo, InputWitnessInfo, OutputInfo, OutputWitnessInfo, PeginDataInfo,
PegoutDataInfo, TransactionInfo, InputScriptInfo, OutputScriptInfo,
};
pub fn subcommand<'a>() -> clap::App<'a, 'a> {
cmd::subcommand_group("tx", "manipulate transactions")
.subcommand(cmd_create())
.subcommand(cmd_decode())
}
pub fn execute<'a>(matches: &clap::ArgMatches<'a>) {
match matches.subcommand() {
("create", Some(ref m)) => exec_create(&m),
("decode", Some(ref m)) => exec_decode(&m),
(_, _) => unreachable!("clap prints help"),
};
}
fn cmd_create<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("create", "create a raw transaction from JSON").args(&[
cmd::arg("tx-info", "the transaction info in JSON").required(false),
cmd::opt("raw-stdout", "output the raw bytes of the result to stdout")
.short("r")
.required(false),
])
}
/// Check both ways to specify the outpoint and panic if conflicting.
fn outpoint_from_input_info(input: &InputInfo) -> OutPoint {
let op1: Option<OutPoint> =
input.prevout.as_ref().map(|ref op| op.parse().expect("invalid prevout format"));
let op2 = match input.txid {
Some(txid) => match input.vout {
Some(vout) => Some(OutPoint {
txid: txid,
vout: vout,
}),
None => panic!("\"txid\" field given in input without \"vout\" field"),
},
None => None,
};
match (op1, op2) {
(Some(op1), Some(op2)) => {
if op1 != op2 {
panic!("Conflicting prevout information in input.");
}
op1
}
(Some(op), None) => op,
(None, Some(op)) => op,
(None, None) => panic!("No previous output provided in input."),
}
}
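// Copies a 32-byte slice into a fixed array; with the TryInto import
// above this is equivalent to `bytes.try_into().ok()`.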
fn bytes_32(bytes: &[u8]) -> Option<[u8; 32]> {
if bytes.len() != 32 {
None
} else {
let mut array = [0; 32];
for (x, y) in bytes.iter().zip(array.iter_mut()) {
*y = *x;
}
Some(array)
}
}
fn create_confidential_value(info: ConfidentialValueInfo) -> confidential::Value {
match info.type_ {
ConfidentialType::Null => confidential::Value::Null,
ConfidentialType::Explicit => confidential::Value::Explicit(
info.value.expect("Field \"value\" is required for explicit values."),
),
ConfidentialType::Confidential => {
let comm = PedersenCommitment::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Value::Confidential(comm)
}
}
}
fn create_confidential_asset(info: ConfidentialAssetInfo) -> confidential::Asset {
match info.type_ {
ConfidentialType::Null => confidential::Asset::Null,
ConfidentialType::Explicit => confidential::Asset::Explicit(
info.asset.expect("Field \"asset\" is required for explicit assets."),
),
ConfidentialType::Confidential => {
let gen = Generator::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Asset::Confidential(gen)
}
}
}
fn create_confidential_nonce(info: ConfidentialNonceInfo) -> confidential::Nonce {
match info.type_ {
ConfidentialType::Null => confidential::Nonce::Null,
ConfidentialType::Explicit => confidential::Nonce::Explicit(bytes_32(
&info.nonce
.expect("Field \"nonce\" is required for asset issuances.")
.0[..],
).expect("wrong size of \"nonce\" field")),
ConfidentialType::Confidential => {
let pubkey = PublicKey::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Nonce::Confidential(pubkey)
}
}
}
fn create_asset_issuance(info: AssetIssuanceInfo) -> AssetIssuance {
AssetIssuance {
asset_blinding_nonce: Tweak::from_slice(
&info.asset_blinding_nonce
.expect("Field \"asset_blinding_nonce\" is required for asset issuances.")
.0[..]
).expect("Invalid \"asset_blinding_nonce\"."),
asset_entropy: bytes_32(
&info.asset_entropy
.expect("Field \"asset_entropy\" is required for asset issuances.")
.0[..],
).expect("Invalid size of \"asset_entropy\"."),
amount: create_confidential_value(
info.amount.expect("Field \"amount\" is required for asset issuances."),
),
inflation_keys: create_confidential_value(
info.inflation_keys.expect("Field \"inflation_keys\" is required for asset issuances."),
),
}
}
fn create_script_sig(ss: InputScriptInfo) -> Script {
if let Some(hex) = ss.hex {
if ss.asm.is_some() {
warn!("Field \"asm\" of input is ignored.");
}
hex.0.into()
} else if let Some(_) = ss.asm {
panic!("Decoding script assembly is not yet supported.");
} else {
panic!("No scriptSig info provided.");
}
}
fn create_pegin_witness(pd: PeginDataInfo, prevout: bitcoin::OutPoint) -> Vec<Vec<u8>> {
if prevout != pd.outpoint.parse().expect("Invalid outpoint in field \"pegin_data\".") {
panic!("Outpoint in \"pegin_data\" does not correspond to input value.");
}
let asset = match create_confidential_asset(pd.asset) {
confidential::Asset::Explicit(asset) => asset,
_ => panic!("Asset in \"pegin_data\" should be explicit."),
};
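// The pegin witness stack uses this fixed field order:
// value, asset, genesis hash, claim script, raw mainchain tx, merkle proof.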
vec![
serialize(&pd.value),
serialize(&asset),
serialize(&pd.genesis_hash),
serialize(&pd.claim_script.0),
serialize(&pd.mainchain_tx_hex.0),
serialize(&pd.merkle_proof.0),
]
}
fn convert_outpoint_to_btc(p: elements::OutPoint) -> bitcoin::OutPoint {
bitcoin::OutPoint {
txid: bitcoin::Txid::from_inner(p.txid.into_inner()),
vout: p.vout,
}
}
fn create_input_witness(
info: Option<InputWitnessInfo>,
pd: Option<PeginDataInfo>,
prevout: OutPoint,
) -> TxInWitness {
let pegin_witness = if info.is_some() && info.as_ref().unwrap().pegin_witness.is_some() {
if pd.is_some() {
warn!("Field \"pegin_data\" of input is ignored.");
}
info.as_ref().unwrap().pegin_witness.clone().unwrap().iter().map(|h| h.clone().0).collect()
} else if let Some(pd) = pd {
create_pegin_witness(pd, convert_outpoint_to_btc(prevout))
} else {
Default::default()
};
if let Some(wi) = info {
TxInWitness {
amount_rangeproof: wi.amount_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
inflation_keys_rangeproof: wi.inflation_keys_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
script_witness: match wi.script_witness {
Some(ref w) => w.iter().map(|h| h.clone().0).collect(),
None => Vec::new(),
},
pegin_witness: pegin_witness,
}
} else {
TxInWitness {
pegin_witness: pegin_witness,
..Default::default()
}
}
}
fn create_input(input: InputInfo) -> TxIn | }
fn create_script_pubkey(spk: OutputScriptInfo, used_network: &mut Option<Network>) -> Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
// Error if another network had already been used.
if let Some(network) = Network::from_params(address.params) {
if used_network.replace(network).unwrap_or(network) != network {
panic!("Addresses for different networks are used in the output scripts.");
}
}
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_bitcoin_script_pubkey(spk: hal::tx::OutputScriptInfo) -> bitcoin::Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_output_witness(w: OutputWitnessInfo) -> TxOutWitness {
TxOutWitness {
surjection_proof: w.surjection_proof.map(|b| {
Box::new(SurjectionProof::from_slice(&b.0[..]).expect("invalid surjection proof"))
}),
rangeproof: w.rangeproof.map(|b| {
Box::new(RangeProof::from_slice(&b.0[..]).expect("invalid rangeproof"))
}),
}
}
fn create_script_pubkey_from_pegout_data(
pd: PegoutDataInfo,
) -> Script {
let mut builder = elements::script::Builder::new()
.push_opcode(elements::opcodes::all::OP_RETURN)
.push_slice(&pd.genesis_hash.into_inner()[..])
.push_slice(&create_bitcoin_script_pubkey(pd.script_pub_key)[..]);
for d in pd.extra_data {
builder = builder.push_slice(&d.0);
}
builder.into_script()
}
fn create_output(output: OutputInfo) -> TxOut {
// Keep track of which network has been used in addresses and error if two different networks
// are used.
let mut used_network = None;
let value = output
.value
.map(create_confidential_value)
.expect("Field \"value\" is required for outputs.");
let asset = output
.asset
.map(create_confidential_asset)
.expect("Field \"asset\" is required for outputs.");
TxOut {
asset: asset,
value: value,
nonce: output.nonce.map(create_confidential_nonce).unwrap_or(confidential::Nonce::Null),
script_pubkey: if let Some(spk) = output.script_pub_key {
if output.pegout_data.is_some() {
warn!("Field \"pegout_data\" of output is ignored.");
}
create_script_pubkey(spk, &mut used_network)
} else if let Some(pd) = output.pegout_data {
match value {
confidential::Value::Explicit(v) => {
if v != pd.value {
panic!("Value in \"pegout_data\" does not correspond to output value.");
}
}
_ => panic!("Explicit value is required for pegout data."),
}
if asset != create_confidential_asset(pd.asset.clone()) {
panic!("Asset in \"pegout_data\" does not correspond to output value.");
}
create_script_pubkey_from_pegout_data(pd)
} else {
Default::default()
},
witness: output.witness.map(create_output_witness).unwrap_or_default(),
}
}
pub fn create_transaction(info: TransactionInfo) -> Transaction {
// Fields that are ignored.
if info.txid.is_some() {
warn!("Field \"txid\" is ignored.");
}
if info.hash.is_some() {
warn!("Field \"hash\" is ignored.");
}
if info.size.is_some() {
warn!("Field \"size\" is ignored.");
}
if info.weight.is_some() {
warn!("Field \"weight\" is ignored.");
}
if info.vsize.is_some() {
warn!("Field \"vsize\" is ignored.");
}
Transaction {
version: info.version.expect("Field \"version\" is required."),
lock_time: elements::PackedLockTime(info.locktime.expect("Field \"locktime\" is required.")),
input: info
.inputs
.expect("Field \"inputs\" is required.")
.into_iter()
.map(create_input)
.collect(),
output: info
.outputs
.expect("Field \"outputs\" is required.")
.into_iter()
.map(create_output)
.collect(),
}
}
fn exec_create<'a>(matches: &clap::ArgMatches<'a>) {
let info = serde_json::from_str::<TransactionInfo>(&cmd::arg_or_stdin(matches, "tx-info"))
.expect("invalid JSON provided");
let tx = create_transaction(info);
let tx_bytes = serialize(&tx);
if matches.is_present("raw-stdout") {
::std::io::stdout().write_all(&tx_bytes).unwrap();
} else {
print!("{}", hex::encode(&tx_bytes));
}
}
fn cmd_decode<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("decode", "decode a raw transaction to JSON")
.args(&cmd::opts_networks())
.args(&[cmd::opt_yaml(), cmd::arg("raw-tx", "the raw transaction in hex").required(false)])
}
fn exec_decode<'a>(matches: &clap::ArgMatches<'a>) {
let hex_tx = cmd::arg_or_stdin(matches, "raw-tx");
let raw_tx = hex::decode(hex_tx.as_ref()).expect("could not decode raw tx");
let tx: Transaction = deserialize(&raw_tx).expect("invalid tx format");
let info = ::GetInfo::get_info(&tx, cmd::network(matches));
cmd::print_output(matches, &info)
}
| {
let has_issuance = input.has_issuance.unwrap_or(input.asset_issuance.is_some());
let is_pegin = input.is_pegin.unwrap_or(input.pegin_data.is_some());
let prevout = outpoint_from_input_info(&input);
TxIn {
previous_output: prevout,
script_sig: input.script_sig.map(create_script_sig).unwrap_or_default(),
sequence: elements::Sequence::from_height(input.sequence.unwrap_or_default().try_into().unwrap()),
is_pegin: is_pegin,
asset_issuance: if has_issuance {
input.asset_issuance.map(create_asset_issuance).unwrap_or_default()
} else {
if input.asset_issuance.is_some() {
warn!("Field \"asset_issuance\" of input is ignored.");
}
Default::default()
},
witness: create_input_witness(input.witness, input.pegin_data, prevout),
} | identifier_body |
tx.rs | use std::convert::TryInto;
use std::io::Write;
use clap;
use bitcoin::hashes::Hash;
use bitcoin;
use elements::encode::{deserialize, serialize};
use elements::{
confidential, AssetIssuance, OutPoint, Transaction, TxIn, TxInWitness, TxOut, TxOutWitness,
Script,
};
use elements::secp256k1_zkp::{
Generator, PedersenCommitment, PublicKey, RangeProof, SurjectionProof, Tweak,
};
use cmd;
use hal_elements::Network;
use hal_elements::confidential::{
ConfidentialAssetInfo, ConfidentialNonceInfo, ConfidentialType, ConfidentialValueInfo,
};
use hal_elements::tx::{
AssetIssuanceInfo, InputInfo, InputWitnessInfo, OutputInfo, OutputWitnessInfo, PeginDataInfo,
PegoutDataInfo, TransactionInfo, InputScriptInfo, OutputScriptInfo,
};
pub fn subcommand<'a>() -> clap::App<'a, 'a> {
cmd::subcommand_group("tx", "manipulate transactions")
.subcommand(cmd_create())
.subcommand(cmd_decode())
}
pub fn execute<'a>(matches: &clap::ArgMatches<'a>) {
match matches.subcommand() {
("create", Some(ref m)) => exec_create(&m),
("decode", Some(ref m)) => exec_decode(&m),
(_, _) => unreachable!("clap prints help"),
};
}
fn cmd_create<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("create", "create a raw transaction from JSON").args(&[
cmd::arg("tx-info", "the transaction info in JSON").required(false),
cmd::opt("raw-stdout", "output the raw bytes of the result to stdout")
.short("r")
.required(false),
])
}
/// Check both ways to specify the outpoint and panic if conflicting.
fn | (input: &InputInfo) -> OutPoint {
let op1: Option<OutPoint> =
input.prevout.as_ref().map(|ref op| op.parse().expect("invalid prevout format"));
let op2 = match input.txid {
Some(txid) => match input.vout {
Some(vout) => Some(OutPoint {
txid: txid,
vout: vout,
}),
None => panic!("\"txid\" field given in input without \"vout\" field"),
},
None => None,
};
match (op1, op2) {
(Some(op1), Some(op2)) => {
if op1 != op2 {
panic!("Conflicting prevout information in input.");
}
op1
}
(Some(op), None) => op,
(None, Some(op)) => op,
(None, None) => panic!("No previous output provided in input."),
}
}
fn bytes_32(bytes: &[u8]) -> Option<[u8; 32]> {
if bytes.len() != 32 {
None
} else {
let mut array = [0; 32];
for (x, y) in bytes.iter().zip(array.iter_mut()) {
*y = *x;
}
Some(array)
}
}
fn create_confidential_value(info: ConfidentialValueInfo) -> confidential::Value {
match info.type_ {
ConfidentialType::Null => confidential::Value::Null,
ConfidentialType::Explicit => confidential::Value::Explicit(
info.value.expect("Field \"value\" is required for explicit values."),
),
ConfidentialType::Confidential => {
let comm = PedersenCommitment::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Value::Confidential(comm)
}
}
}
fn create_confidential_asset(info: ConfidentialAssetInfo) -> confidential::Asset {
match info.type_ {
ConfidentialType::Null => confidential::Asset::Null,
ConfidentialType::Explicit => confidential::Asset::Explicit(
info.asset.expect("Field \"asset\" is required for explicit assets."),
),
ConfidentialType::Confidential => {
let gen = Generator::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Asset::Confidential(gen)
}
}
}
fn create_confidential_nonce(info: ConfidentialNonceInfo) -> confidential::Nonce {
match info.type_ {
ConfidentialType::Null => confidential::Nonce::Null,
ConfidentialType::Explicit => confidential::Nonce::Explicit(bytes_32(
&info.nonce
.expect("Field \"nonce\" is required for asset issuances.")
.0[..],
).expect("wrong size of \"nonce\" field")),
ConfidentialType::Confidential => {
let pubkey = PublicKey::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Nonce::Confidential(pubkey)
}
}
}
fn create_asset_issuance(info: AssetIssuanceInfo) -> AssetIssuance {
AssetIssuance {
asset_blinding_nonce: Tweak::from_slice(
&info.asset_blinding_nonce
.expect("Field \"asset_blinding_nonce\" is required for asset issuances.")
.0[..]
).expect("Invalid \"asset_blinding_nonce\"."),
asset_entropy: bytes_32(
&info.asset_entropy
.expect("Field \"asset_entropy\" is required for asset issuances.")
.0[..],
).expect("Invalid size of \"asset_entropy\"."),
amount: create_confidential_value(
info.amount.expect("Field \"amount\" is required for asset issuances."),
),
inflation_keys: create_confidential_value(
info.inflation_keys.expect("Field \"inflation_keys\" is required for asset issuances."),
),
}
}
fn create_script_sig(ss: InputScriptInfo) -> Script {
if let Some(hex) = ss.hex {
if ss.asm.is_some() {
warn!("Field \"asm\" of input is ignored.");
}
hex.0.into()
} else if let Some(_) = ss.asm {
panic!("Decoding script assembly is not yet supported.");
} else {
panic!("No scriptSig info provided.");
}
}
fn create_pegin_witness(pd: PeginDataInfo, prevout: bitcoin::OutPoint) -> Vec<Vec<u8>> {
if prevout != pd.outpoint.parse().expect("Invalid outpoint in field \"pegin_data\".") {
panic!("Outpoint in \"pegin_data\" does not correspond to input value.");
}
let asset = match create_confidential_asset(pd.asset) {
confidential::Asset::Explicit(asset) => asset,
_ => panic!("Asset in \"pegin_data\" should be explicit."),
};
vec![
serialize(&pd.value),
serialize(&asset),
serialize(&pd.genesis_hash),
serialize(&pd.claim_script.0),
serialize(&pd.mainchain_tx_hex.0),
serialize(&pd.merkle_proof.0),
]
}
fn convert_outpoint_to_btc(p: elements::OutPoint) -> bitcoin::OutPoint {
bitcoin::OutPoint {
txid: bitcoin::Txid::from_inner(p.txid.into_inner()),
vout: p.vout,
}
}
fn create_input_witness(
info: Option<InputWitnessInfo>,
pd: Option<PeginDataInfo>,
prevout: OutPoint,
) -> TxInWitness {
let pegin_witness = if info.is_some() && info.as_ref().unwrap().pegin_witness.is_some() {
if pd.is_some() {
warn!("Field \"pegin_data\" of input is ignored.");
}
info.as_ref().unwrap().pegin_witness.clone().unwrap().iter().map(|h| h.clone().0).collect()
} else if let Some(pd) = pd {
create_pegin_witness(pd, convert_outpoint_to_btc(prevout))
} else {
Default::default()
};
if let Some(wi) = info {
TxInWitness {
amount_rangeproof: wi.amount_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
inflation_keys_rangeproof: wi.inflation_keys_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
script_witness: match wi.script_witness {
Some(ref w) => w.iter().map(|h| h.clone().0).collect(),
None => Vec::new(),
},
pegin_witness: pegin_witness,
}
} else {
TxInWitness {
pegin_witness: pegin_witness,
..Default::default()
}
}
}
fn create_input(input: InputInfo) -> TxIn {
let has_issuance = input.has_issuance.unwrap_or(input.asset_issuance.is_some());
let is_pegin = input.is_pegin.unwrap_or(input.pegin_data.is_some());
let prevout = outpoint_from_input_info(&input);
TxIn {
previous_output: prevout,
script_sig: input.script_sig.map(create_script_sig).unwrap_or_default(),
sequence: elements::Sequence::from_height(input.sequence.unwrap_or_default().try_into().unwrap()),
is_pegin: is_pegin,
asset_issuance: if has_issuance {
input.asset_issuance.map(create_asset_issuance).unwrap_or_default()
} else {
if input.asset_issuance.is_some() {
warn!("Field \"asset_issuance\" of input is ignored.");
}
Default::default()
},
witness: create_input_witness(input.witness, input.pegin_data, prevout),
}
}
fn create_script_pubkey(spk: OutputScriptInfo, used_network: &mut Option<Network>) -> Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
// Error if another network had already been used.
if let Some(network) = Network::from_params(address.params) {
if used_network.replace(network).unwrap_or(network) != network {
panic!("Addresses for different networks are used in the output scripts.");
}
}
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_bitcoin_script_pubkey(spk: hal::tx::OutputScriptInfo) -> bitcoin::Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_output_witness(w: OutputWitnessInfo) -> TxOutWitness {
TxOutWitness {
surjection_proof: w.surjection_proof.map(|b| {
Box::new(SurjectionProof::from_slice(&b.0[..]).expect("invalid surjection proof"))
}),
rangeproof: w.rangeproof.map(|b| {
Box::new(RangeProof::from_slice(&b.0[..]).expect("invalid rangeproof"))
}),
}
}
fn create_script_pubkey_from_pegout_data(
pd: PegoutDataInfo,
) -> Script {
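// A pegout output is an OP_RETURN pushing the mainchain genesis hash, the mainchain scriptPubKey, and any extra data.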
let mut builder = elements::script::Builder::new()
.push_opcode(elements::opcodes::all::OP_RETURN)
.push_slice(&pd.genesis_hash.into_inner()[..])
.push_slice(&create_bitcoin_script_pubkey(pd.script_pub_key)[..]);
for d in pd.extra_data {
builder = builder.push_slice(&d.0);
}
builder.into_script()
}
fn create_output(output: OutputInfo) -> TxOut {
// Keep track of which network has been used in addresses and error if two different networks
// are used.
let mut used_network = None;
let value = output
.value
.map(create_confidential_value)
.expect("Field \"value\" is required for outputs.");
let asset = output
.asset
.map(create_confidential_asset)
.expect("Field \"asset\" is required for outputs.");
TxOut {
asset: asset,
value: value,
nonce: output.nonce.map(create_confidential_nonce).unwrap_or(confidential::Nonce::Null),
script_pubkey: if let Some(spk) = output.script_pub_key {
if output.pegout_data.is_some() {
warn!("Field \"pegout_data\" of output is ignored.");
}
create_script_pubkey(spk, &mut used_network)
} else if let Some(pd) = output.pegout_data {
match value {
confidential::Value::Explicit(v) => {
if v != pd.value {
panic!("Value in \"pegout_data\" does not correspond to output value.");
}
}
_ => panic!("Explicit value is required for pegout data."),
}
if asset != create_confidential_asset(pd.asset.clone()) {
panic!("Asset in \"pegout_data\" does not correspond to output value.");
}
create_script_pubkey_from_pegout_data(pd)
} else {
Default::default()
},
witness: output.witness.map(create_output_witness).unwrap_or_default(),
}
}
pub fn create_transaction(info: TransactionInfo) -> Transaction {
// Fields that are ignored.
if info.txid.is_some() {
warn!("Field \"txid\" is ignored.");
}
if info.hash.is_some() {
warn!("Field \"hash\" is ignored.");
}
if info.size.is_some() {
warn!("Field \"size\" is ignored.");
}
if info.weight.is_some() {
warn!("Field \"weight\" is ignored.");
}
if info.vsize.is_some() {
warn!("Field \"vsize\" is ignored.");
}
Transaction {
version: info.version.expect("Field \"version\" is required."),
lock_time: elements::PackedLockTime(info.locktime.expect("Field \"locktime\" is required.")),
input: info
.inputs
.expect("Field \"inputs\" is required.")
.into_iter()
.map(create_input)
.collect(),
output: info
.outputs
.expect("Field \"outputs\" is required.")
.into_iter()
.map(create_output)
.collect(),
}
}
fn exec_create<'a>(matches: &clap::ArgMatches<'a>) {
let info = serde_json::from_str::<TransactionInfo>(&cmd::arg_or_stdin(matches, "tx-info"))
.expect("invalid JSON provided");
let tx = create_transaction(info);
let tx_bytes = serialize(&tx);
if matches.is_present("raw-stdout") {
::std::io::stdout().write_all(&tx_bytes).unwrap();
} else {
print!("{}", hex::encode(&tx_bytes));
}
}
fn cmd_decode<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("decode", "decode a raw transaction to JSON")
.args(&cmd::opts_networks())
.args(&[cmd::opt_yaml(), cmd::arg("raw-tx", "the raw transaction in hex").required(false)])
}
fn exec_decode<'a>(matches: &clap::ArgMatches<'a>) {
let hex_tx = cmd::arg_or_stdin(matches, "raw-tx");
let raw_tx = hex::decode(hex_tx.as_ref()).expect("could not decode raw tx");
let tx: Transaction = deserialize(&raw_tx).expect("invalid tx format");
let info = ::GetInfo::get_info(&tx, cmd::network(matches));
cmd::print_output(matches, &info)
}
| outpoint_from_input_info | identifier_name |
tx.rs | use std::convert::TryInto;
use std::io::Write;
use clap;
use bitcoin::hashes::Hash;
use bitcoin;
use elements::encode::{deserialize, serialize};
use elements::{
confidential, AssetIssuance, OutPoint, Transaction, TxIn, TxInWitness, TxOut, TxOutWitness,
Script,
};
use elements::secp256k1_zkp::{
Generator, PedersenCommitment, PublicKey, RangeProof, SurjectionProof, Tweak,
};
use cmd;
use hal_elements::Network;
use hal_elements::confidential::{
ConfidentialAssetInfo, ConfidentialNonceInfo, ConfidentialType, ConfidentialValueInfo,
};
use hal_elements::tx::{
AssetIssuanceInfo, InputInfo, InputWitnessInfo, OutputInfo, OutputWitnessInfo, PeginDataInfo,
PegoutDataInfo, TransactionInfo, InputScriptInfo, OutputScriptInfo,
};
pub fn subcommand<'a>() -> clap::App<'a, 'a> {
cmd::subcommand_group("tx", "manipulate transactions")
.subcommand(cmd_create())
.subcommand(cmd_decode())
}
pub fn execute<'a>(matches: &clap::ArgMatches<'a>) {
match matches.subcommand() {
("create", Some(ref m)) => exec_create(&m),
("decode", Some(ref m)) => exec_decode(&m),
(_, _) => unreachable!("clap prints help"),
};
}
fn cmd_create<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("create", "create a raw transaction from JSON").args(&[
cmd::arg("tx-info", "the transaction info in JSON").required(false),
cmd::opt("raw-stdout", "output the raw bytes of the result to stdout")
.short("r")
.required(false),
])
}
/// Check both ways to specify the outpoint and panic if conflicting.
fn outpoint_from_input_info(input: &InputInfo) -> OutPoint {
let op1: Option<OutPoint> =
input.prevout.as_ref().map(|ref op| op.parse().expect("invalid prevout format"));
let op2 = match input.txid {
Some(txid) => match input.vout {
Some(vout) => Some(OutPoint {
txid: txid,
vout: vout,
}),
None => panic!("\"txid\" field given in input without \"vout\" field"),
},
None => None,
};
match (op1, op2) {
(Some(op1), Some(op2)) => {
if op1 != op2 {
panic!("Conflicting prevout information in input.");
}
op1
}
(Some(op), None) => op,
(None, Some(op)) => op,
(None, None) => panic!("No previous output provided in input."),
}
}
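// Copy a 32-byte slice into a fixed-size array; returns None if the slice length differs.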
fn bytes_32(bytes: &[u8]) -> Option<[u8; 32]> {
if bytes.len() != 32 {
None
} else {
let mut array = [0; 32];
for (x, y) in bytes.iter().zip(array.iter_mut()) {
*y = *x;
}
Some(array)
}
}
fn create_confidential_value(info: ConfidentialValueInfo) -> confidential::Value {
match info.type_ {
ConfidentialType::Null => confidential::Value::Null,
ConfidentialType::Explicit => confidential::Value::Explicit(
info.value.expect("Field \"value\" is required for explicit values."),
),
ConfidentialType::Confidential => {
let comm = PedersenCommitment::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Value::Confidential(comm)
}
}
}
fn create_confidential_asset(info: ConfidentialAssetInfo) -> confidential::Asset {
match info.type_ {
ConfidentialType::Null => confidential::Asset::Null,
ConfidentialType::Explicit => confidential::Asset::Explicit(
info.asset.expect("Field \"asset\" is required for explicit assets."),
),
ConfidentialType::Confidential => {
let gen = Generator::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Asset::Confidential(gen)
}
}
}
fn create_confidential_nonce(info: ConfidentialNonceInfo) -> confidential::Nonce {
match info.type_ {
ConfidentialType::Null => confidential::Nonce::Null,
ConfidentialType::Explicit => confidential::Nonce::Explicit(bytes_32(
&info.nonce
.expect("Field \"nonce\" is required for asset issuances.")
.0[..],
).expect("wrong size of \"nonce\" field")),
ConfidentialType::Confidential => {
let pubkey = PublicKey::from_slice(
&info.commitment
.expect("Field \"commitment\" is required for confidential values.")
.0[..]
).expect("invalid confidential commitment");
confidential::Nonce::Confidential(pubkey)
}
}
}
fn create_asset_issuance(info: AssetIssuanceInfo) -> AssetIssuance {
AssetIssuance {
asset_blinding_nonce: Tweak::from_slice(
&info.asset_blinding_nonce
.expect("Field \"asset_blinding_nonce\" is required for asset issuances.")
.0[..]
).expect("Invalid \"asset_blinding_nonce\"."),
asset_entropy: bytes_32(
&info.asset_entropy
.expect("Field \"asset_entropy\" is required for asset issuances.")
.0[..],
).expect("Invalid size of \"asset_entropy\"."),
amount: create_confidential_value(
info.amount.expect("Field \"amount\" is required for asset issuances."),
),
inflation_keys: create_confidential_value(
info.inflation_keys.expect("Field \"inflation_keys\" is required for asset issuances."),
),
}
}
fn create_script_sig(ss: InputScriptInfo) -> Script {
if let Some(hex) = ss.hex {
if ss.asm.is_some() {
warn!("Field \"asm\" of input is ignored.");
}
hex.0.into()
} else if let Some(_) = ss.asm {
panic!("Decoding script assembly is not yet supported.");
} else {
panic!("No scriptSig info provided.");
}
}
fn create_pegin_witness(pd: PeginDataInfo, prevout: bitcoin::OutPoint) -> Vec<Vec<u8>> {
if prevout != pd.outpoint.parse().expect("Invalid outpoint in field \"pegin_data\".") {
panic!("Outpoint in \"pegin_data\" does not correspond to input value.");
}
let asset = match create_confidential_asset(pd.asset) {
confidential::Asset::Explicit(asset) => asset,
_ => panic!("Asset in \"pegin_data\" should be explicit."),
};
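// The pegin witness is a stack of six serialized items: value, asset, genesis hash, claim script, mainchain tx, merkle proof.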
vec![
serialize(&pd.value),
serialize(&asset),
serialize(&pd.genesis_hash),
serialize(&pd.claim_script.0),
serialize(&pd.mainchain_tx_hex.0),
serialize(&pd.merkle_proof.0),
]
}
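// Reinterpret an Elements outpoint as a Bitcoin outpoint (same txid bytes, same vout).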
fn convert_outpoint_to_btc(p: elements::OutPoint) -> bitcoin::OutPoint {
bitcoin::OutPoint {
txid: bitcoin::Txid::from_inner(p.txid.into_inner()),
vout: p.vout,
}
}
fn create_input_witness(
info: Option<InputWitnessInfo>,
pd: Option<PeginDataInfo>,
prevout: OutPoint,
) -> TxInWitness {
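// An explicit "pegin_witness" in the witness info takes precedence; otherwise the witness is rebuilt from "pegin_data" when present.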
let pegin_witness = if info.is_some() && info.as_ref().unwrap().pegin_witness.is_some() {
if pd.is_some() {
warn!("Field \"pegin_data\" of input is ignored.");
}
info.as_ref().unwrap().pegin_witness.clone().unwrap().iter().map(|h| h.clone().0).collect()
} else if let Some(pd) = pd {
create_pegin_witness(pd, convert_outpoint_to_btc(prevout))
} else {
Default::default()
};
if let Some(wi) = info {
TxInWitness {
amount_rangeproof: wi.amount_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
inflation_keys_rangeproof: wi.inflation_keys_rangeproof
.map(|b| Box::new(RangeProof::from_slice(&b.0).expect("invalid rangeproof"))),
script_witness: match wi.script_witness {
Some(ref w) => w.iter().map(|h| h.clone().0).collect(),
None => Vec::new(),
},
pegin_witness: pegin_witness,
}
} else {
TxInWitness {
pegin_witness: pegin_witness,
..Default::default()
}
}
}
fn create_input(input: InputInfo) -> TxIn {
let has_issuance = input.has_issuance.unwrap_or(input.asset_issuance.is_some());
let is_pegin = input.is_pegin.unwrap_or(input.pegin_data.is_some());
let prevout = outpoint_from_input_info(&input);
TxIn {
previous_output: prevout,
script_sig: input.script_sig.map(create_script_sig).unwrap_or_default(),
sequence: elements::Sequence::from_height(input.sequence.unwrap_or_default().try_into().unwrap()),
is_pegin: is_pegin,
asset_issuance: if has_issuance {
input.asset_issuance.map(create_asset_issuance).unwrap_or_default()
} else {
if input.asset_issuance.is_some() {
warn!("Field \"asset_issuance\" of input is ignored.");
}
Default::default()
},
witness: create_input_witness(input.witness, input.pegin_data, prevout),
}
}
fn create_script_pubkey(spk: OutputScriptInfo, used_network: &mut Option<Network>) -> Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
// Error if another network had already been used.
if let Some(network) = Network::from_params(address.params) {
if used_network.replace(network).unwrap_or(network) != network {
panic!("Addresses for different networks are used in the output scripts.");
}
}
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_bitcoin_script_pubkey(spk: hal::tx::OutputScriptInfo) -> bitcoin::Script {
if spk.type_.is_some() {
warn!("Field \"type\" of output is ignored.");
}
if let Some(hex) = spk.hex {
if spk.asm.is_some() {
warn!("Field \"asm\" of output is ignored.");
}
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
//TODO(stevenroose) do script sanity check to avoid blackhole?
hex.0.into()
} else if let Some(_) = spk.asm {
if spk.address.is_some() {
warn!("Field \"address\" of output is ignored.");
}
panic!("Decoding script assembly is not yet supported.");
} else if let Some(address) = spk.address {
address.script_pubkey()
} else {
panic!("No scriptPubKey info provided.");
}
}
fn create_output_witness(w: OutputWitnessInfo) -> TxOutWitness {
TxOutWitness {
surjection_proof: w.surjection_proof.map(|b| {
Box::new(SurjectionProof::from_slice(&b.0[..]).expect("invalid surjection proof"))
}),
rangeproof: w.rangeproof.map(|b| {
Box::new(RangeProof::from_slice(&b.0[..]).expect("invalid rangeproof"))
}),
}
}
fn create_script_pubkey_from_pegout_data(
pd: PegoutDataInfo,
) -> Script {
let mut builder = elements::script::Builder::new()
.push_opcode(elements::opcodes::all::OP_RETURN)
.push_slice(&pd.genesis_hash.into_inner()[..])
.push_slice(&create_bitcoin_script_pubkey(pd.script_pub_key)[..]);
for d in pd.extra_data {
builder = builder.push_slice(&d.0);
}
builder.into_script()
}
fn create_output(output: OutputInfo) -> TxOut {
// Keep track of which network has been used in addresses and error if two different networks
// are used.
let mut used_network = None;
let value = output
.value
.map(create_confidential_value)
.expect("Field \"value\" is required for outputs.");
let asset = output
.asset
.map(create_confidential_asset)
.expect("Field \"asset\" is required for outputs.");
TxOut {
asset: asset,
value: value,
nonce: output.nonce.map(create_confidential_nonce).unwrap_or(confidential::Nonce::Null),
script_pubkey: if let Some(spk) = output.script_pub_key {
if output.pegout_data.is_some() {
warn!("Field \"pegout_data\" of output is ignored.");
}
create_script_pubkey(spk, &mut used_network)
} else if let Some(pd) = output.pegout_data {
match value {
confidential::Value::Explicit(v) => {
if v != pd.value {
panic!("Value in \"pegout_data\" does not correspond to output value.");
}
}
_ => panic!("Explicit value is required for pegout data."),
}
if asset != create_confidential_asset(pd.asset.clone()) {
panic!("Asset in \"pegout_data\" does not correspond to output value.");
}
create_script_pubkey_from_pegout_data(pd)
} else {
Default::default()
},
witness: output.witness.map(create_output_witness).unwrap_or_default(),
}
}
pub fn create_transaction(info: TransactionInfo) -> Transaction {
// Fields that are ignored.
if info.txid.is_some() {
warn!("Field \"txid\" is ignored.");
}
if info.hash.is_some() {
warn!("Field \"hash\" is ignored.");
}
if info.size.is_some() {
warn!("Field \"size\" is ignored.");
}
if info.weight.is_some() {
warn!("Field \"weight\" is ignored.");
}
if info.vsize.is_some() {
warn!("Field \"vsize\" is ignored.");
}
Transaction {
version: info.version.expect("Field \"version\" is required."),
lock_time: elements::PackedLockTime(info.locktime.expect("Field \"locktime\" is required.")),
input: info
.inputs
.expect("Field \"inputs\" is required.")
.into_iter()
.map(create_input)
.collect(),
output: info
.outputs
.expect("Field \"outputs\" is required.")
.into_iter()
.map(create_output)
.collect(),
}
}
fn exec_create<'a>(matches: &clap::ArgMatches<'a>) {
let info = serde_json::from_str::<TransactionInfo>(&cmd::arg_or_stdin(matches, "tx-info"))
.expect("invalid JSON provided");
let tx = create_transaction(info);
let tx_bytes = serialize(&tx);
if matches.is_present("raw-stdout") { | } else {
print!("{}", hex::encode(&tx_bytes));
}
}
fn cmd_decode<'a>() -> clap::App<'a, 'a> {
cmd::subcommand("decode", "decode a raw transaction to JSON")
.args(&cmd::opts_networks())
.args(&[cmd::opt_yaml(), cmd::arg("raw-tx", "the raw transaction in hex").required(false)])
}
fn exec_decode<'a>(matches: &clap::ArgMatches<'a>) {
let hex_tx = cmd::arg_or_stdin(matches, "raw-tx");
let raw_tx = hex::decode(hex_tx.as_ref()).expect("could not decode raw tx");
let tx: Transaction = deserialize(&raw_tx).expect("invalid tx format");
let info = ::GetInfo::get_info(&tx, cmd::network(matches));
cmd::print_output(matches, &info)
} | ::std::io::stdout().write_all(&tx_bytes).unwrap(); | random_line_split |
term_gui.rs | // Copyright (c) The Swiboe development team. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE.txt
// in the project root for license information.
#[macro_use]
extern crate clap;
extern crate rustbox;
extern crate serde_json;
extern crate subsequence_match;
extern crate swiboe;
extern crate swiboe_gui as gui;
extern crate time;
extern crate serde;
extern crate uuid;
use gui::buffer_views;
use serde::{Serialize, Deserialize};
use gui::keymap_handler;
use rustbox::{Color, RustBox};
use std::cmp;
use std::env;
use std::net;
use std::path;
use std::str::FromStr;
use std::sync::mpsc;
use std::sync::{RwLock, Arc};
use swiboe::client::{self, RpcCaller};
use uuid::Uuid;
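// Clamp v into the inclusive range [min, max] in place.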
fn clamp<T: Copy + cmp::Ord + std::fmt::Debug>(min: T, max: T, v: &mut T) {
let new_value = cmp::min(max, cmp::max(min, *v));
*v = new_value;
}
struct CompleterWidget {
candidates: subsequence_match::CandidateSet,
rpc: Option<client::rpc::client::Context>,
query: String,
results: Vec<subsequence_match::QueryResult>,
selection_index: isize,
}
enum CompleterState {
Running,
Canceled,
Selected(String),
}
impl CompleterWidget {
fn new(client: &mut client::Client) -> swiboe::Result<Self> {
// TODO(sirver): This should use the current work directory of the server, since the server
// might run on a different machine than the client - and certainly in a different
// directory.
let current_dir = env::current_dir().unwrap();
let rpc = try!(client.call("list_files", &swiboe::plugin::list_files::ListFilesRequest {
directory: current_dir.to_string_lossy().into_owned(),
}));
Ok(CompleterWidget {
candidates: subsequence_match::CandidateSet::new(),
rpc: Some(rpc),
query: "".into(),
results: Vec::new(),
selection_index: 0,
})
}
fn on_key(&mut self, key: rustbox::Key) -> CompleterState {
match key {
rustbox::Key::Char(c) => {
self.query.push(c);
self.results.clear();
CompleterState::Running
},
rustbox::Key::Backspace => {
self.query.pop();
self.results.clear();
CompleterState::Running
},
rustbox::Key::Down => {
self.selection_index += 1;
CompleterState::Running
},
rustbox::Key::Up => {
self.selection_index -= 1;
CompleterState::Running
},
rustbox::Key::Esc => {
self.rpc.take().unwrap().cancel().unwrap();
CompleterState::Canceled
},
rustbox::Key::Enter => {
self.rpc.take().unwrap().cancel().unwrap();
if self.results.is_empty() {
CompleterState::Canceled
} else {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
CompleterState::Selected(self.results[self.selection_index as usize].text.clone())
}
}
_ => CompleterState::Running,
}
}
fn draw(&mut self, rustbox: &rustbox::RustBox) {
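// Drain any pending "list_files" updates from the RPC into the candidate set before rendering.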
while let Some(b) = self.rpc.as_mut().unwrap().try_recv().unwrap() {
self.results.clear();
let b: swiboe::plugin::list_files::ListFilesUpdate = serde_json::from_value(b).unwrap();
for file in &b.files {
self.candidates.insert(file);
}
}
if self.results.is_empty() {
let query_to_use: String = self.query.chars().filter(|c| !c.is_whitespace()).collect();
self.candidates.query(&query_to_use, subsequence_match::MatchCase::No, &mut self.results);
}
if !self.results.is_empty() {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
}
rustbox.print(0, 0, rustbox::RB_BOLD, Color::Yellow, Color::Default, &self.query);
let len_string = format!("{}/{} matching ({})", self.results.len(), self.candidates.len(),
if self.rpc.as_ref().unwrap().done() { "done" } else { "scanning" } );
rustbox.print(rustbox.width() - len_string.len() - 1, 0, rustbox::RB_BOLD, Color::Blue, Color::Default, &len_string);
let mut row = 1usize;
for result in &self.results {
let mut matching_indices = result.matching_indices.iter().peekable();
for (col, c) in result.text.chars().enumerate() {
let matches = match matching_indices.peek() {
Some(val) if **val == col => true,
_ => false,
};
let mut style = if matches {
matching_indices.next();
rustbox::RB_BOLD
} else {
rustbox::RB_NORMAL
};
if row as isize == self.selection_index + 1 {
style = style | rustbox::RB_REVERSE;
}
rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row > rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client, |
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
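// The cursor was not on any drawn character (e.g. past the end of its line), so paint a red cell for it.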
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
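// Try the socket argument as a TCP address first, falling back to a Unix socket path.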
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
}
}
return Ok(true);
}
fn handle_key(&mut self, key: rustbox::Key) -> swiboe::Result<bool> {
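// Time since the previous keypress; the keymap handler uses it to time key sequences.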
let delta_t = {
let now = time::PreciseTime::now();
let delta_t = self.last_key_down_event.to(now);
self.last_key_down_event = now;
delta_t
};
let delta_t_in_seconds = delta_t.num_nanoseconds().unwrap() as f64 / 1e9;
match key {
// NOCOM(#sirver): should be handled through plugins.
rustbox::Key::Char('q') => return Ok(false),
rustbox::Key::Ctrl('t') => {
self.completer = Some(try!(CompleterWidget::new(&mut self.client)))
},
rustbox::Key::Esc => {
self.config_file_runner.keymap_handler.timeout();
},
rustbox::Key::Char(a) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Char(a));
},
rustbox::Key::Up => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Up);
},
rustbox::Key::Down => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Down);
},
rustbox::Key::Left => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Left);
},
rustbox::Key::Right => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Right);
},
rustbox::Key::Tab => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Tab);
},
rustbox::Key::Ctrl(some_other_key) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Ctrl);
try!(self.handle_key(rustbox::Key::Char(some_other_key)));
}
_ => (),
}
Ok(true)
}
fn draw(&mut self) {
self.rustbox.clear();
if let Some(ref mut widget) = self.buffer_view_widget {
let buffer_views = self.buffer_views.read().unwrap();
let buffer_view = buffer_views.get(&widget.view_id).unwrap();
widget.draw(&buffer_view, &self.rustbox);
}
if let Some(ref mut completer) = self.completer {
completer.draw(&self.rustbox);
}
self.rustbox.present();
}
}
fn parse_options() -> Options {
let matches = clap::App::new("term_gui")
.about("Terminal client for Swiboe")
.version(&crate_version!()[..])
.arg(clap::Arg::with_name("SOCKET")
.short("s")
.long("socket")
.help("Socket at which the master listens.")
.required(true)
.takes_value(true))
.arg(clap::Arg::with_name("CONFIG_FILE")
.short("c")
.long("config_file")
.help("The config file to run when the GUI starts up.")
.takes_value(true))
.get_matches();
Options {
config_file: path::PathBuf::from(matches.value_of("CONFIG_FILE").unwrap_or("config.lua")),
socket: matches.value_of("SOCKET").unwrap().into(),
}
}
fn main() {
let options = parse_options();
let mut gui = TerminalGui::new(&options).unwrap();
while gui.handle_events().unwrap() {
gui.draw();
}
} | cursor_id: String::new(),
}
} | random_line_split |
term_gui.rs | // Copyright (c) The Swiboe development team. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE.txt
// in the project root for license information.
#[macro_use]
extern crate clap;
extern crate rustbox;
extern crate serde_json;
extern crate subsequence_match;
extern crate swiboe;
extern crate swiboe_gui as gui;
extern crate time;
extern crate serde;
extern crate uuid;
use gui::buffer_views;
use serde::{Serialize, Deserialize};
use gui::keymap_handler;
use rustbox::{Color, RustBox};
use std::cmp;
use std::env;
use std::net;
use std::path;
use std::str::FromStr;
use std::sync::mpsc;
use std::sync::{RwLock, Arc};
use swiboe::client::{self, RpcCaller};
use uuid::Uuid;
fn clamp<T: Copy + cmp::Ord + std::fmt::Debug>(min: T, max: T, v: &mut T) {
let new_value = cmp::min(max, cmp::max(min, *v));
*v = new_value;
}
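// Fuzzy file finder overlay: streams file names from the "list_files" RPC and subsequence-matches them against the query.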
struct CompleterWidget {
candidates: subsequence_match::CandidateSet,
rpc: Option<client::rpc::client::Context>,
query: String,
results: Vec<subsequence_match::QueryResult>,
selection_index: isize,
}
enum CompleterState {
Running,
Canceled,
Selected(String),
}
impl CompleterWidget {
fn new(client: &mut client::Client) -> swiboe::Result<Self> {
// TODO(sirver): This should use the current work directory of the server, since the server
// might run on a different machine than the client - and certainly in a different
// directory.
let current_dir = env::current_dir().unwrap();
let rpc = try!(client.call("list_files", &swiboe::plugin::list_files::ListFilesRequest {
directory: current_dir.to_string_lossy().into_owned(),
}));
Ok(CompleterWidget {
candidates: subsequence_match::CandidateSet::new(),
rpc: Some(rpc),
query: "".into(),
results: Vec::new(),
selection_index: 0,
})
}
fn on_key(&mut self, key: rustbox::Key) -> CompleterState {
match key {
rustbox::Key::Char(c) => {
self.query.push(c);
self.results.clear();
CompleterState::Running
},
rustbox::Key::Backspace => {
self.query.pop();
self.results.clear();
CompleterState::Running
},
rustbox::Key::Down => {
self.selection_index += 1;
CompleterState::Running
},
rustbox::Key::Up => {
self.selection_index -= 1;
CompleterState::Running
},
rustbox::Key::Esc => {
self.rpc.take().unwrap().cancel().unwrap();
CompleterState::Canceled
},
rustbox::Key::Enter => {
self.rpc.take().unwrap().cancel().unwrap();
if self.results.is_empty() {
CompleterState::Canceled
} else {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
CompleterState::Selected(self.results[self.selection_index as usize].text.clone())
}
}
_ => CompleterState::Running,
}
}
fn draw(&mut self, rustbox: &rustbox::RustBox) {
while let Some(b) = self.rpc.as_mut().unwrap().try_recv().unwrap() {
self.results.clear();
let b: swiboe::plugin::list_files::ListFilesUpdate = serde_json::from_value(b).unwrap();
for file in &b.files {
self.candidates.insert(file);
}
}
if self.results.is_empty() {
let query_to_use: String = self.query.chars().filter(|c| !c.is_whitespace()).collect();
self.candidates.query(&query_to_use, subsequence_match::MatchCase::No, &mut self.results);
}
if !self.results.is_empty() {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
}
rustbox.print(0, 0, rustbox::RB_BOLD, Color::Yellow, Color::Default, &self.query);
let len_string = format!("{}/{} matching ({})", self.results.len(), self.candidates.len(),
if self.rpc.as_ref().unwrap().done() { "done" } else { "scanning" } );
rustbox.print(rustbox.width() - len_string.len() - 1, 0, rustbox::RB_BOLD, Color::Blue, Color::Default, &len_string);
let mut row = 1usize;
for result in &self.results {
let mut matching_indices = result.matching_indices.iter().peekable();
for (col, c) in result.text.chars().enumerate() {
let matches = match matching_indices.peek() {
Some(val) if **val == col => true,
_ => false,
};
let mut style = if matches {
matching_indices.next();
rustbox::RB_BOLD
} else {
rustbox::RB_NORMAL
};
if row as isize == self.selection_index + 1 {
style = style | rustbox::RB_REVERSE;
}
rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row > rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client,
cursor_id: String::new(),
}
}
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
}
}
return Ok(true);
}
fn handle_key(&mut self, key: rustbox::Key) -> swiboe::Result<bool> {
let delta_t = {
let now = time::PreciseTime::now();
let delta_t = self.last_key_down_event.to(now);
self.last_key_down_event = now;
delta_t
};
let delta_t_in_seconds = delta_t.num_nanoseconds().unwrap() as f64 / 1e9;
match key {
// NOCOM(#sirver): should be handled through plugins.
rustbox::Key::Char('q') => return Ok(false),
rustbox::Key::Ctrl('t') => {
self.completer = Some(try!(CompleterWidget::new(&mut self.client)))
},
rustbox::Key::Esc => {
self.config_file_runner.keymap_handler.timeout();
},
rustbox::Key::Char(a) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Char(a));
},
rustbox::Key::Up => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Up);
},
rustbox::Key::Down => | ,
rustbox::Key::Left => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Left);
},
rustbox::Key::Right => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Right);
},
rustbox::Key::Tab => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Tab);
},
rustbox::Key::Ctrl(some_other_key) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Ctrl);
try!(self.handle_key(rustbox::Key::Char(some_other_key)));
}
_ => (),
}
Ok(true)
}
fn draw(&mut self) {
self.rustbox.clear();
if let Some(ref mut widget) = self.buffer_view_widget {
let buffer_views = self.buffer_views.read().unwrap();
let buffer_view = buffer_views.get(&widget.view_id).unwrap();
widget.draw(&buffer_view, &self.rustbox);
}
if let Some(ref mut completer) = self.completer {
completer.draw(&self.rustbox);
}
self.rustbox.present();
}
}
fn parse_options() -> Options {
let matches = clap::App::new("term_gui")
.about("Terminal client for Swiboe")
.version(&crate_version!()[..])
.arg(clap::Arg::with_name("SOCKET")
.short("s")
.long("socket")
.help("Socket at which the master listens.")
.required(true)
.takes_value(true))
.arg(clap::Arg::with_name("CONFIG_FILE")
.short("c")
.long("config_file")
.help("The config file to run when the GUI starts up.")
.takes_value(true))
.get_matches();
Options {
config_file: path::PathBuf::from(matches.value_of("CONFIG_FILE").unwrap_or("config.lua")),
socket: matches.value_of("SOCKET").unwrap().into(),
}
}
fn main() {
let options = parse_options();
let mut gui = TerminalGui::new(&options).unwrap();
while gui.handle_events().unwrap() {
gui.draw();
}
}
| {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Down);
} | conditional_block |
term_gui.rs | // Copyright (c) The Swiboe development team. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE.txt
// in the project root for license information.
#[macro_use]
extern crate clap;
extern crate rustbox;
extern crate serde_json;
extern crate subsequence_match;
extern crate swiboe;
extern crate swiboe_gui as gui;
extern crate time;
extern crate serde;
extern crate uuid;
use gui::buffer_views;
use serde::{Serialize, Deserialize};
use gui::keymap_handler;
use rustbox::{Color, RustBox};
use std::cmp;
use std::env;
use std::net;
use std::path;
use std::str::FromStr;
use std::sync::mpsc;
use std::sync::{RwLock, Arc};
use swiboe::client::{self, RpcCaller};
use uuid::Uuid;
fn clamp<T: Copy + cmp::Ord + std::fmt::Debug>(min: T, max: T, v: &mut T) {
let new_value = cmp::min(max, cmp::max(min, *v));
*v = new_value;
}
struct CompleterWidget {
candidates: subsequence_match::CandidateSet,
rpc: Option<client::rpc::client::Context>,
query: String,
results: Vec<subsequence_match::QueryResult>,
selection_index: isize,
}
enum CompleterState {
Running,
Canceled,
Selected(String),
}
impl CompleterWidget {
fn new(client: &mut client::Client) -> swiboe::Result<Self> {
// TODO(sirver): This should use the current work directory of the server, since the server
// might run on a different machine than the client - and certainly in a different
// directory.
let current_dir = env::current_dir().unwrap();
let rpc = try!(client.call("list_files", &swiboe::plugin::list_files::ListFilesRequest {
directory: current_dir.to_string_lossy().into_owned(),
}));
Ok(CompleterWidget {
candidates: subsequence_match::CandidateSet::new(),
rpc: Some(rpc),
query: "".into(),
results: Vec::new(),
selection_index: 0,
})
}
fn on_key(&mut self, key: rustbox::Key) -> CompleterState {
match key {
rustbox::Key::Char(c) => {
self.query.push(c);
self.results.clear();
CompleterState::Running
},
rustbox::Key::Backspace => {
self.query.pop();
self.results.clear();
CompleterState::Running
},
rustbox::Key::Down => {
self.selection_index += 1;
CompleterState::Running
},
rustbox::Key::Up => {
self.selection_index -= 1;
CompleterState::Running
},
rustbox::Key::Esc => {
self.rpc.take().unwrap().cancel().unwrap();
CompleterState::Canceled
},
rustbox::Key::Enter => {
self.rpc.take().unwrap().cancel().unwrap();
if self.results.is_empty() {
CompleterState::Canceled
} else {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
CompleterState::Selected(self.results[self.selection_index as usize].text.clone())
}
}
_ => CompleterState::Running,
}
}
fn draw(&mut self, rustbox: &rustbox::RustBox) {
while let Some(b) = self.rpc.as_mut().unwrap().try_recv().unwrap() {
self.results.clear();
let b: swiboe::plugin::list_files::ListFilesUpdate = serde_json::from_value(b).unwrap();
for file in &b.files {
self.candidates.insert(file);
}
}
if self.results.is_empty() {
let query_to_use: String = self.query.chars().filter(|c| !c.is_whitespace()).collect();
self.candidates.query(&query_to_use, subsequence_match::MatchCase::No, &mut self.results);
}
if !self.results.is_empty() {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
}
rustbox.print(0, 0, rustbox::RB_BOLD, Color::Yellow, Color::Default, &self.query);
let len_string = format!("{}/{} matching ({})", self.results.len(), self.candidates.len(),
if self.rpc.as_ref().unwrap().done() { "done" } else { "scanning" } );
rustbox.print(rustbox.width() - len_string.len() - 1, 0, rustbox::RB_BOLD, Color::Blue, Color::Default, &len_string);
let mut row = 1usize;
for result in &self.results {
let mut matching_indices = result.matching_indices.iter().peekable();
for (col, c) in result.text.chars().enumerate() {
let matches = match matching_indices.peek() {
Some(val) if **val == col => true,
_ => false,
};
let mut style = if matches {
matching_indices.next();
rustbox::RB_BOLD
} else {
rustbox::RB_NORMAL
};
if row as isize == self.selection_index + 1 {
style = style | rustbox::RB_REVERSE;
}
rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row > rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client,
cursor_id: String::new(),
}
}
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
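// Render the visible window of the buffer; the cell under the cursor gets a red background.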
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
}
}
return Ok(true);
}
fn handle_key(&mut self, key: rustbox::Key) -> swiboe::Result<bool> {
let delta_t = {
let now = time::PreciseTime::now();
let delta_t = self.last_key_down_event.to(now);
self.last_key_down_event = now;
delta_t
};
let delta_t_in_seconds = delta_t.num_nanoseconds().unwrap() as f64 / 1e9;
match key {
// NOCOM(#sirver): should be handled through plugins.
rustbox::Key::Char('q') => return Ok(false),
rustbox::Key::Ctrl('t') => {
self.completer = Some(try!(CompleterWidget::new(&mut self.client)))
},
rustbox::Key::Esc => {
self.config_file_runner.keymap_handler.timeout();
},
rustbox::Key::Char(a) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Char(a));
},
rustbox::Key::Up => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Up);
},
rustbox::Key::Down => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Down);
},
rustbox::Key::Left => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Left);
},
rustbox::Key::Right => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Right);
},
rustbox::Key::Tab => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Tab);
},
rustbox::Key::Ctrl(some_other_key) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Ctrl);
try!(self.handle_key(rustbox::Key::Char(some_other_key)));
}
_ => (),
}
Ok(true)
}
fn draw(&mut self) {
self.rustbox.clear();
if let Some(ref mut widget) = self.buffer_view_widget {
let buffer_views = self.buffer_views.read().unwrap();
let buffer_view = buffer_views.get(&widget.view_id).unwrap();
widget.draw(&buffer_view, &self.rustbox);
}
if let Some(ref mut completer) = self.completer {
completer.draw(&self.rustbox);
}
self.rustbox.present();
}
}
fn parse_options() -> Options {
let matches = clap::App::new("term_gui")
.about("Terminal client for Swiboe")
.version(&crate_version!()[..])
.arg(clap::Arg::with_name("SOCKET")
.short("s")
.long("socket")
.help("Socket at which the master listens.")
.required(true)
.takes_value(true))
.arg(clap::Arg::with_name("CONFIG_FILE")
.short("c")
.long("config_file")
.help("The config file to run when the GUI starts up.")
.takes_value(true))
.get_matches();
Options {
config_file: path::PathBuf::from(matches.value_of("CONFIG_FILE").unwrap_or("config.lua")),
socket: matches.value_of("SOCKET").unwrap().into(),
}
}
fn main() | {
let options = parse_options();
let mut gui = TerminalGui::new(&options).unwrap();
while gui.handle_events().unwrap() {
gui.draw();
}
} | identifier_body |
|
term_gui.rs | // Copyright (c) The Swiboe development team. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE.txt
// in the project root for license information.
#[macro_use]
extern crate clap;
extern crate rustbox;
extern crate serde_json;
extern crate subsequence_match;
extern crate swiboe;
extern crate swiboe_gui as gui;
extern crate time;
extern crate serde;
extern crate uuid;
use gui::buffer_views;
use serde::{Serialize, Deserialize};
use gui::keymap_handler;
use rustbox::{Color, RustBox};
use std::cmp;
use std::env;
use std::net;
use std::path;
use std::str::FromStr;
use std::sync::mpsc;
use std::sync::{RwLock, Arc};
use swiboe::client::{self, RpcCaller};
use uuid::Uuid;
fn clamp<T: Copy + cmp::Ord + std::fmt::Debug>(min: T, max: T, v: &mut T) {
let new_value = cmp::min(max, cmp::max(min, *v));
*v = new_value;
}
struct CompleterWidget {
candidates: subsequence_match::CandidateSet,
rpc: Option<client::rpc::client::Context>,
query: String,
results: Vec<subsequence_match::QueryResult>,
selection_index: isize,
}
enum CompleterState {
Running,
Canceled,
Selected(String),
}
impl CompleterWidget {
fn new(client: &mut client::Client) -> swiboe::Result<Self> {
// TODO(sirver): This should use the current work directory of the server, since the server
// might run on a different machine than the client - and certainly in a different
// directory.
let current_dir = env::current_dir().unwrap();
let rpc = try!(client.call("list_files", &swiboe::plugin::list_files::ListFilesRequest {
directory: current_dir.to_string_lossy().into_owned(),
}));
Ok(CompleterWidget {
candidates: subsequence_match::CandidateSet::new(),
rpc: Some(rpc),
query: "".into(),
results: Vec::new(),
selection_index: 0,
})
}
fn | (&mut self, key: rustbox::Key) -> CompleterState {
match key {
rustbox::Key::Char(c) => {
self.query.push(c);
self.results.clear();
CompleterState::Running
},
rustbox::Key::Backspace => {
self.query.pop();
self.results.clear();
CompleterState::Running
},
rustbox::Key::Down => {
self.selection_index += 1;
CompleterState::Running
},
rustbox::Key::Up => {
self.selection_index -= 1;
CompleterState::Running
},
rustbox::Key::Esc => {
self.rpc.take().unwrap().cancel().unwrap();
CompleterState::Canceled
},
rustbox::Key::Enter => {
self.rpc.take().unwrap().cancel().unwrap();
if self.results.is_empty() {
CompleterState::Canceled
} else {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
CompleterState::Selected(self.results[self.selection_index as usize].text.clone())
}
}
_ => CompleterState::Running,
}
}
fn draw(&mut self, rustbox: &rustbox::RustBox) {
while let Some(b) = self.rpc.as_mut().unwrap().try_recv().unwrap() {
self.results.clear();
let b: swiboe::plugin::list_files::ListFilesUpdate = serde_json::from_value(b).unwrap();
for file in &b.files {
self.candidates.insert(file);
}
}
if self.results.is_empty() {
let query_to_use: String = self.query.chars().filter(|c| !c.is_whitespace()).collect();
self.candidates.query(&query_to_use, subsequence_match::MatchCase::No, &mut self.results);
}
if !self.results.is_empty() {
clamp(0, self.results.len() as isize - 1, &mut self.selection_index);
}
rustbox.print(0, 0, rustbox::RB_BOLD, Color::Yellow, Color::Default, &self.query);
let len_string = format!("{}/{} matching ({})", self.results.len(), self.candidates.len(),
if self.rpc.as_ref().unwrap().done() { "done" } else { "scanning" } );
rustbox.print(rustbox.width() - len_string.len() - 1, 0, rustbox::RB_BOLD, Color::Blue, Color::Default, &len_string);
let mut row = 1usize;
for result in &self.results {
let mut matching_indices = result.matching_indices.iter().peekable();
for (col, c) in result.text.chars().enumerate() {
let matches = match matching_indices.peek() {
Some(val) if **val == col => true,
_ => false,
};
let mut style = if matches {
matching_indices.next();
rustbox::RB_BOLD
} else {
rustbox::RB_NORMAL
};
if row as isize == self.selection_index + 1 {
style = style | rustbox::RB_REVERSE;
}
rustbox.print_char(col, row, style, Color::Default, Color::Default, c);
}
row += 1;
if row > rustbox.height() {
break;
}
}
}
}
struct BufferViewWidget {
view_id: String,
client: client::ThinClient,
cursor_id: String,
}
impl BufferViewWidget {
pub fn new(view_id: String, client: client::ThinClient) -> Self {
BufferViewWidget {
view_id: view_id,
client: client,
cursor_id: String::new(),
}
}
fn draw(&mut self, buffer_view: &buffer_views::BufferView, rustbox: &rustbox::RustBox) {
let mut row = 0;
let top_line_index = buffer_view.top_line_index as usize;
self.cursor_id = buffer_view.cursor.id().to_string();
let mut cursor_drawn = false;
while row < rustbox.height() {
let line_index = top_line_index + row;
if let Some(line) = buffer_view.lines.get(line_index) {
for (col, c) in line.chars().enumerate() {
if col >= rustbox.width() {
break;
}
let bg = if buffer_view.cursor.position.line_index == line_index as isize &&
buffer_view.cursor.position.column_index as usize == col {
cursor_drawn = true;
Color::Red
} else {
Color::Default
};
rustbox.print_char(col, row, rustbox::RB_NORMAL, Color::Default, bg, c);
}
}
row += 1;
}
if !cursor_drawn {
let row = buffer_view.cursor.position.line_index - top_line_index as isize;
rustbox.print_char(buffer_view.cursor.position.column_index as usize,
row as usize, rustbox::RB_NORMAL,
Color::Default, Color::Red, ' ');
}
}
}
#[derive(Debug)]
struct Options {
socket: String,
config_file: path::PathBuf,
}
struct TerminalGui {
config_file_runner: Box<gui::config_file::ConfigFileRunner>,
client: client::Client,
rustbox: rustbox::RustBox,
buffer_views: Arc<RwLock<gui::buffer_views::BufferViews>>,
last_key_down_event: time::PreciseTime,
completer: Option<CompleterWidget>,
buffer_view_widget: Option<BufferViewWidget>,
// NOCOM(#sirver): GuiCommand in namespace gui is very duplicated
gui_commands: mpsc::Receiver<gui::command::GuiCommand>,
}
impl TerminalGui {
fn new(options: &Options) -> swiboe::Result<Self> {
let mut client = match net::SocketAddr::from_str(&options.socket) {
Ok(value) => {
client::Client::connect_tcp(&value).unwrap()
}
Err(_) => {
let socket_path = path::PathBuf::from(&options.socket);
client::Client::connect_unix(&socket_path).unwrap()
}
};
let mut config_file_runner = gui::config_file::ConfigFileRunner::new(
try!(client.clone()));
config_file_runner.run(&options.config_file);
let rustbox = match RustBox::init(rustbox::InitOptions {
input_mode: rustbox::InputMode::Current,
buffer_stderr: true,
}) {
Result::Ok(v) => v,
Result::Err(e) => panic!("{}", e),
};
let gui_id: String = Uuid::new_v4().to_hyphenated_string();
let (gui_commands_tx, gui_commands_rx) = mpsc::channel();
let buffer_views = try!(gui::buffer_views::BufferViews::new(&gui_id, gui_commands_tx, &mut client));
Ok(TerminalGui {
config_file_runner: config_file_runner,
client: client,
rustbox: rustbox,
buffer_views: buffer_views,
last_key_down_event: time::PreciseTime::now(),
completer: None,
buffer_view_widget: None,
gui_commands: gui_commands_rx,
})
}
fn handle_events(&mut self) -> swiboe::Result<bool> {
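// Briefly poll rustbox for a key event, then drain queued GUI commands; returns Ok(false) to quit.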
match self.rustbox.peek_event(std::time::Duration::from_millis(5), false) {
Ok(rustbox::Event::KeyEvent(key)) => {
if self.completer.is_some() {
let rv = self.completer.as_mut().unwrap().on_key(key);
match rv {
CompleterState::Running => (),
CompleterState::Canceled => {
self.completer = None;
},
CompleterState::Selected(result) => {
self.completer = None;
let mut rpc = try!(self.client.call("buffer.open", &swiboe::plugin::buffer::open::Request {
uri: format!("file://{}", result),
}));
let response: swiboe::plugin::buffer::open::Response = rpc.wait_for().unwrap();
let mut buffer_views = self.buffer_views.write().unwrap();
let view_id = buffer_views.new_view(response.buffer_index, self.rustbox.width(), self.rustbox.height());
self.buffer_view_widget = Some(BufferViewWidget::new(view_id, try!(self.client.clone())));
},
}
} else {
if !try!(self.handle_key(key)) {
return Ok(false);
}
}
},
Err(e) => panic!("{}", e),
_ => { }
}
while let Ok(command) = self.gui_commands.try_recv() {
match command {
gui::command::GuiCommand::Quit => return Ok(false),
gui::command::GuiCommand::Redraw => (),
}
}
return Ok(true);
}
fn handle_key(&mut self, key: rustbox::Key) -> swiboe::Result<bool> {
let delta_t = {
let now = time::PreciseTime::now();
let delta_t = self.last_key_down_event.to(now);
self.last_key_down_event = now;
delta_t
};
let delta_t_in_seconds = delta_t.num_nanoseconds().unwrap() as f64 / 1e9;
match key {
// NOCOM(#sirver): should be handled through plugins.
rustbox::Key::Char('q') => return Ok(false),
rustbox::Key::Ctrl('t') => {
self.completer = Some(try!(CompleterWidget::new(&mut self.client)))
},
rustbox::Key::Esc => {
self.config_file_runner.keymap_handler.timeout();
},
rustbox::Key::Char(a) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Char(a));
},
rustbox::Key::Up => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Up);
},
rustbox::Key::Down => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Down);
},
rustbox::Key::Left => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Left);
},
rustbox::Key::Right => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Right);
},
rustbox::Key::Tab => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Tab);
},
rustbox::Key::Ctrl(some_other_key) => {
self.config_file_runner.keymap_handler.key_down(
delta_t_in_seconds, keymap_handler::Key::Ctrl);
try!(self.handle_key(rustbox::Key::Char(some_other_key)));
}
_ => (),
}
Ok(true)
}
fn draw(&mut self) {
self.rustbox.clear();
if let Some(ref mut widget) = self.buffer_view_widget {
let buffer_views = self.buffer_views.read().unwrap();
let buffer_view = buffer_views.get(&widget.view_id).unwrap();
widget.draw(&buffer_view, &self.rustbox);
}
if let Some(ref mut completer) = self.completer {
completer.draw(&self.rustbox);
}
self.rustbox.present();
}
}
fn parse_options() -> Options {
let matches = clap::App::new("term_gui")
.about("Terminal client for Swiboe")
.version(&crate_version!()[..])
.arg(clap::Arg::with_name("SOCKET")
.short("s")
.long("socket")
.help("Socket at which the master listens.")
.required(true)
.takes_value(true))
.arg(clap::Arg::with_name("CONFIG_FILE")
.short("c")
.long("config_file")
.help("The config file to run when the GUI starts up.")
.takes_value(true))
.get_matches();
Options {
config_file: path::PathBuf::from(matches.value_of("CONFIG_FILE").unwrap_or("config.lua")),
socket: matches.value_of("SOCKET").unwrap().into(),
}
}
fn main() {
let options = parse_options();
let mut gui = TerminalGui::new(&options).unwrap();
while gui.handle_events().unwrap() {
gui.draw();
}
}
| on_key | identifier_name |
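// ---------------------------------------------------------------------------
// A dependency-free sketch of the viewport logic in BufferViewWidget::draw
// above: lines are clipped to the terminal size and each visible cell is
// reported with its screen coordinates. `width`/`height` stand in for the
// rustbox handle's dimensions; this is illustrative only, not part of the gui.
fn visible_cells(width: usize, height: usize, top_line: usize, lines: &[String]) -> Vec<(usize, usize, char)> {
    let mut cells = Vec::new();
    for row in 0..height {
        if let Some(line) = lines.get(top_line + row) {
            for (col, c) in line.chars().enumerate() {
                if col >= width {
                    break; // clip lines wider than the terminal
                }
                cells.push((col, row, c));
            }
        }
    }
    cells
}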
write.rs | use std::{
cmp,
convert::TryInto,
io::{self, Write},
};
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use cgmath::prelude::*;
use crate::{
prelude::*,
io::{PropKind, Error, ErrorKind},
};
use super::{Encoding, RawTriangle};
// ----------------------------------------------------------------------------
/// The solid name used when the user didn't specify one.
const DEFAULT_SOLID_NAME: &str = "mesh";
// ===============================================================================================
// ===== STL Config
// ===============================================================================================
/// Used to configure and create a [`Writer`].
///
/// This is used to configure basic settings for the file to be written. You
/// can use the [`Config::into_writer`] method to create a [`Writer`] that can
/// be used as a streaming sink.
#[derive(Clone, Debug)]
pub struct Config {
solid_name: String,
encoding: Encoding,
}
impl Config {
/// Creates a new builder instance from the given encoding. For
/// convenience, you can use [`Config::binary()`] or [`Config::ascii()`]
/// directly.
pub fn new(encoding: Encoding) -> Self {
Self {
solid_name: DEFAULT_SOLID_NAME.into(),
encoding,
}
}
/// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte useless header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
let pos_b = positions[1].to_point3();
let pos_c = positions[2].to_point3();
let normal = (pos_b - pos_a).cross(pos_c - pos_a).normalize();
[normal.x, normal.y, normal.z]
}
// ===============================================================================================
// ===== Functions for body writing
// ===============================================================================================
/// Writes the three values of the given vector (in STL ASCII encoding,
/// separated
/// by ' ') into the writer.
fn write_ascii_vector(w: &mut impl Write, [x, y, z]: [f32; 3]) -> Result<(), io::Error> {
write_ascii_f32(w, x)?;
write!(w, " ")?;
write_ascii_f32(w, y)?;
write!(w, " ")?;
write_ascii_f32(w, z)?;
Ok(())
}
/// Writes the given `f32` in STL ASCII format into the given writer.
///
/// The STL specification is terribly underspecified. The only information
/// about how to encode floats in ASCII is this:
///
/// > The numerical data in the facet normal and vertex lines are single
/// > precision floats, for example, 1.23456E+789. A facet normal coordinate
/// > may have a leading minus sign; a vertex coordinate may not.
///
/// I don't think the last sentence makes any sense: why forbid negative
/// coordinates? In any case, no one in the real world cares about that: there
/// are plenty of STL files out there with negative vertex coordinates.
///
/// About the actual format: clearly unhelpful. In the real world, STL file floats
/// are encoded all over the place. I've seen `1`, `1.2`, `10.2`, `1.02e1`,
/// `1.020000E+001` and more. We just stick to the exact format mentioned in
/// the "specification". This does not necessarily make any sense and wastes
/// memory, but so does ASCII STL. Just don't use the ASCII STL format!
fn write_ascii_f32(w: &mut impl Write, v: f32) -> Result<(), io::Error> {
use std::num::FpCategory;
match v.classify() {
FpCategory::Normal | FpCategory::Subnormal => {
let exponent = v.abs().log10().floor();
let mantissa = v / 10f32.powf(exponent);
write!(w, "{}E{:+}", mantissa, exponent)
}
_ => {
// `v` is either infinite, `NaN` or zero. We want to serialize
// the zeroes as `0.0`.
write!(w, "{:.1}", v)
}
}
}
| binary | identifier_name |
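// ---------------------------------------------------------------------------
// A hedged usage sketch for the Config/Writer API above. It assumes the
// RawTriangle fields (vertices, normal, attribute_byte_count) are publicly
// constructible, as the StreamSink impl suggests; writing into a Vec<u8>
// works because `&mut Vec<u8>` implements io::Write.
fn write_single_triangle() -> Result<Vec<u8>, Error> {
    let tri = RawTriangle {
        vertices: [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
        normal: [0.0, 0.0, 1.0],
        attribute_byte_count: 0,
    };
    let mut out = Vec::new();
    Config::binary()
        .with_solid_name("unit_triangle")
        .into_writer(&mut out)
        .write_raw(1, std::iter::once(Ok(tri)))?;
    // 80-byte header + 4-byte triangle count + one 50-byte record
    assert_eq!(out.len(), 80 + 4 + 50);
    Ok(out)
}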
write.rs | use std::{
cmp,
convert::TryInto,
io::{self, Write},
};
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use cgmath::prelude::*;
use crate::{
prelude::*,
io::{PropKind, Error, ErrorKind},
};
use super::{Encoding, RawTriangle};
// ----------------------------------------------------------------------------
/// The solid name used when the user didn't specify one.
const DEFAULT_SOLID_NAME: &str = "mesh";
// ===============================================================================================
// ===== STL Config
// ===============================================================================================
/// Used to configure and create a [`Writer`].
///
/// This is used to configure basic settings for the file to be written. You
/// can use the [`Config::into_writer`] method to create a [`Writer`] that can
/// be used as a streaming sink.
#[derive(Clone, Debug)]
pub struct Config {
solid_name: String,
encoding: Encoding,
}
impl Config {
/// Creates a new builder instance from the given encoding. For
/// convenience, you can use [`Config::binary()`] or [`Config::ascii()`]
/// directly.
pub fn new(encoding: Encoding) -> Self {
Self {
solid_name: DEFAULT_SOLID_NAME.into(),
encoding,
}
}
/// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte useless header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
let pos_b = positions[1].to_point3();
let pos_c = positions[2].to_point3();
let normal = (pos_b - pos_a).cross(pos_c - pos_a).normalize();
[normal.x, normal.y, normal.z]
}
// ===============================================================================================
// ===== Functions for body writing
// ===============================================================================================
/// Writes the three values of the given vector (in STL ASCII encoding,
/// separated
/// by ' ') into the writer.
fn write_ascii_vector(w: &mut impl Write, [x, y, z]: [f32; 3]) -> Result<(), io::Error> {
write_ascii_f32(w, x)?;
write!(w, " ")?;
write_ascii_f32(w, y)?;
write!(w, " ")?;
write_ascii_f32(w, z)?;
Ok(())
}
/// Writes the given `f32` in STL ASCII format into the given writer.
///
/// The STL specification is terribly underspecified. The only information
/// about how to encode floats in ASCII is this:
///
/// > The numerical data in the facet normal and vertex lines are single
/// > precision floats, for example, 1.23456E+789. A facet normal coordinate
/// > may have a leading minus sign; a vertex coordinate may not.
///
/// I don't think the last sentence makes any sense: why forbid negative
/// coordinates? In any case, no one in the real world cares about that: there
/// are plenty of STL files out there with negative vertex coordinates.
///
/// About the actual format: clearly unhelpful. In the real world, STL file floats
/// are encoded all over the place. I've seen `1`, `1.2`, `10.2`, `1.02e1`,
/// `1.020000E+001` and more. We just stick to the exact format mentioned in
/// the "specification". This does not necessarily make any sense and wastes
/// memory, but so does ASCII STL. Just don't use the ASCII STL format!
fn write_ascii_f32(w: &mut impl Write, v: f32) -> Result<(), io::Error> {
use std::num::FpCategory;
match v.classify() {
FpCategory::Normal | FpCategory::Subnormal => {
let exponent = v.abs().log10().floor();
let mantissa = v / 10f32.powf(exponent);
write!(w, "{}E{:+}", mantissa, exponent)
}
_ => {
// `v` is either infinite, `NaN` or zero. We want to serialize
// the zeroes as `0.0`.
write!(w, "{:.1}", v)
}
}
}
| {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
} | identifier_body |
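// ---------------------------------------------------------------------------
// Illustrative decoder for the 50-byte record emitted per triangle by
// write_raw_binary above: twelve little-endian f32 values (normal, then the
// three vertices) followed by the u16 attribute byte count. Only a sketch
// for documentation; the real reader lives elsewhere in the crate.
fn decode_triangle_record(buf: &[u8; 50]) -> ([f32; 3], [[f32; 3]; 3], u16) {
    let f = |i: usize| LittleEndian::read_f32(&buf[i * 4..(i + 1) * 4]);
    let normal = [f(0), f(1), f(2)];
    let vertices = [
        [f(3), f(4), f(5)],
        [f(6), f(7), f(8)],
        [f(9), f(10), f(11)],
    ];
    (normal, vertices, LittleEndian::read_u16(&buf[48..50]))
}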
write.rs | use std::{
cmp,
convert::TryInto,
io::{self, Write},
};
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use cgmath::prelude::*;
use crate::{
prelude::*,
io::{PropKind, Error, ErrorKind},
};
use super::{Encoding, RawTriangle};
// ----------------------------------------------------------------------------
/// The solid name used when the user didn't specify one.
const DEFAULT_SOLID_NAME: &str = "mesh";
// ===============================================================================================
// ===== STL Config
// ===============================================================================================
/// Used to configure and create a [`Writer`].
///
/// This is used to configure basic settings for the file to be written. You
/// can use the [`Config::into_writer`] method to create a [`Writer`] that can
/// be used as a streaming sink.
#[derive(Clone, Debug)]
pub struct Config {
solid_name: String,
encoding: Encoding,
}
impl Config {
/// Creates a new builder instance from the given encoding. For
/// convenience, you can use [`Config::binary()`] or [`Config::ascii()`]
/// directly.
pub fn new(encoding: Encoding) -> Self {
Self {
solid_name: DEFAULT_SOLID_NAME.into(),
encoding,
}
}
/// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte useless header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
let pos_b = positions[1].to_point3();
let pos_c = positions[2].to_point3();
let normal = (pos_b - pos_a).cross(pos_c - pos_a).normalize();
[normal.x, normal.y, normal.z]
}
// ===============================================================================================
// ===== Functions for body writing
// ===============================================================================================
/// Writes the three values of the given vector (in STL ASCII encoding,
/// separated
/// by ' ') into the writer.
fn write_ascii_vector(w: &mut impl Write, [x, y, z]: [f32; 3]) -> Result<(), io::Error> {
write_ascii_f32(w, x)?;
write!(w, " ")?;
write_ascii_f32(w, y)?;
write!(w, " ")?;
write_ascii_f32(w, z)?;
Ok(())
}
/// Writes the given `f32` in STL ASCII format into the given writer.
///
/// The STL specification is terribly underspecified. The only information
/// about how to encode floats in ASCII is this:
///
/// > The numerical data in the facet normal and vertex lines are single
/// > precision floats, for example, 1.23456E+789. A facet normal coordinate
/// > may have a leading minus sign; a vertex coordinate may not.
///
/// I don't think the last sentence makes any sense: why forbid negative
/// coordinates? In any case, no one in the real world cares about that: there
/// are plenty of STL files out there with negative vertex coordinates.
///
/// About the actual format: clearly unhelpful. In the real world, STL file floats
/// are encoded all over the place. I've seen `1`, `1.2`, `10.2`, `1.02e1`,
/// `1.020000E+001` and more. We just stick to the exact format mentioned in
/// the "specification". This does not necessarily make any sense and wastes
/// memory, but so does ASCII STL. Just don't use the ASCII STL format!
fn write_ascii_f32(w: &mut impl Write, v: f32) -> Result<(), io::Error> {
use std::num::FpCategory;
match v.classify() {
FpCategory::Normal | FpCategory::Subnormal => {
let exponent = v.abs().log10().floor();
let mantissa = v / 10f32.powf(exponent);
write!(w, "{}E{:+}", mantissa, exponent)
}
_ => {
// `v` is either infinite, `NaN` or zero. We want to serialize
// the zeroes as `0.0`.
write!(w, "{:.1}", v)
}
}
} |
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt}; | random_line_split |
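// ---------------------------------------------------------------------------
// Quick sanity check for the calc_normal helper above: a counter-clockwise
// triangle in the XY plane should get a normal pointing along +Z. Assumes
// the cgmath-backed to_point3()/cross()/normalize() calls used in the file.
fn calc_normal_points_up() {
    let tri = [[0.0f32, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]];
    let n = calc_normal(&tri);
    assert!(n[0].abs() < 1e-6 && n[1].abs() < 1e-6 && (n[2] - 1.0).abs() < 1e-6);
}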
write.rs | use std::{
cmp,
convert::TryInto,
io::{self, Write},
};
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use cgmath::prelude::*;
use crate::{
prelude::*,
io::{PropKind, Error, ErrorKind},
};
use super::{Encoding, RawTriangle};
// ----------------------------------------------------------------------------
/// The solid name used when the user didn't specify one.
const DEFAULT_SOLID_NAME: &str = "mesh";
// ===============================================================================================
// ===== STL Config
// ===============================================================================================
/// Used to configure and create a [`Writer`].
///
/// This is used to configure basic settings for the file to be written. You
/// can use the [`Config::into_writer`] method to create a [`Writer`] that can
/// be used as a streaming sink.
#[derive(Clone, Debug)]
pub struct Config {
solid_name: String,
encoding: Encoding,
}
impl Config {
/// Creates a new builder instance from the given encoding. For
/// convenience, you can use [`Config::binary()`] or [`Config::ascii()`]
/// directly.
pub fn new(encoding: Encoding) -> Self {
Self {
solid_name: DEFAULT_SOLID_NAME.into(),
encoding,
}
}
/// Creates a new builder instance for a binary STL file.
pub fn binary() -> Self {
Self::new(Encoding::Binary)
}
/// Creates a new builder instance for an ASCII STL file.
///
/// **Note**: please don't use this. STL ASCII files are even more space
/// inefficient than binary STL files. If you can avoid it, never use ASCII
/// STL. In fact, consider not using STL at all.
pub fn ascii() -> Self {
Self::new(Encoding::Ascii)
}
/// Sets the solid name for this file.
///
/// The given name must be an ASCII string (otherwise the function panics).
/// If a binary file is written, only 76 bytes of the string are written to
/// the file.
pub fn with_solid_name(self, name: impl Into<String>) -> Self {
let solid_name = name.into();
assert!(solid_name.is_ascii());
Self {
solid_name,
.. self
}
}
/// Creates a [`Writer`] with `self` as config.
pub fn into_writer<W: io::Write>(self, writer: W) -> Writer<W> {
Writer::new(self, writer)
}
}
// ===============================================================================================
// ===== STL Writer
// ===============================================================================================
/// A writer able to write binary and ASCII STL files. Implements
/// [`StreamSink`].
#[derive(Debug)]
pub struct Writer<W: io::Write> {
config: Config,
writer: W,
}
impl<W: io::Write> Writer<W> {
/// Creates a new STL writer with the given STL config which will write to
/// the given `io::Write` instance.
pub fn new(config: Config, writer: W) -> Self {
Self { config, writer }
}
/// Low level function to write STL files.
///
/// You usually don't need to use this function directly and instead use a
/// high level interface. This function is still exposed to give you more
/// or less complete control.
pub fn write_raw(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
if self.config.encoding == Encoding::Ascii {
self.write_raw_ascii(triangles)
} else {
self.write_raw_binary(num_triangles, triangles)
}
}
#[inline(never)]
pub fn write_raw_binary(
self,
num_triangles: u32,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
// First, an 80-byte useless header that must not begin with "solid".
// We try to fit the solid name in it.
let name_len = cmp::min(config.solid_name.len(), 76);
let signature = format!("LOX {}", &config.solid_name[..name_len]);
let padding = vec![b' '; 80 - signature.len()];
w.write_all(signature.as_bytes())?;
w.write_all(&padding)?;
// Next, number of triangles
w.write_u32::<LittleEndian>(num_triangles)?;
const TRI_SIZE: usize = 4 * 3 * 4 + 2;
let mut buf = [0; TRI_SIZE];
for triangle in triangles {
let triangle = triangle?;
// Write face normal
LittleEndian::write_f32(&mut buf[00..04], triangle.normal[0]);
LittleEndian::write_f32(&mut buf[04..08], triangle.normal[1]);
LittleEndian::write_f32(&mut buf[08..12], triangle.normal[2]);
LittleEndian::write_f32(&mut buf[12..16], triangle.vertices[0][0]);
LittleEndian::write_f32(&mut buf[16..20], triangle.vertices[0][1]);
LittleEndian::write_f32(&mut buf[20..24], triangle.vertices[0][2]);
LittleEndian::write_f32(&mut buf[24..28], triangle.vertices[1][0]);
LittleEndian::write_f32(&mut buf[28..32], triangle.vertices[1][1]);
LittleEndian::write_f32(&mut buf[32..36], triangle.vertices[1][2]);
LittleEndian::write_f32(&mut buf[36..40], triangle.vertices[2][0]);
LittleEndian::write_f32(&mut buf[40..44], triangle.vertices[2][1]);
LittleEndian::write_f32(&mut buf[44..48], triangle.vertices[2][2]);
LittleEndian::write_u16(&mut buf[48..50], triangle.attribute_byte_count);
w.write_all(&buf)?;
}
Ok(())
}
#[inline(never)]
pub fn write_raw_ascii(
self,
triangles: impl IntoIterator<Item = Result<RawTriangle, Error>>,
) -> Result<(), Error> {
let config = self.config;
let mut w = self.writer;
writeln!(w, "solid {}", config.solid_name)?;
for triangle in triangles {
let triangle = triangle?;
// Write face normal
write!(w, " facet normal ")?;
write_ascii_vector(&mut w, triangle.normal)?;
writeln!(w, "")?;
// Write all vertex positions
writeln!(w, " outer loop")?;
for &vertex_pos in &triangle.vertices {
write!(w, " vertex ")?;
write_ascii_vector(&mut w, vertex_pos)?;
writeln!(w, "")?;
}
writeln!(w, " endloop")?;
writeln!(w, " endfacet")?;
}
writeln!(w, "endsolid {}", config.solid_name)?;
Ok(())
}
}
impl<W: io::Write> StreamSink for Writer<W> {
#[inline(never)]
fn transfer_from<S: MemSource>(self, src: &S) -> Result<(), Error> {
// Make sure we have positions
if src.vertex_position_type().is_none() {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
}
let mesh = src.core_mesh();
let has_normals = src.face_normal_type().is_some();
// The triangle iterator
let triangles = mesh.face_handles().map(|fh| {
let mut it = mesh.vertices_around_face(fh);
let va = it.next().expect("bug: less than 3 vertices around face");
let vb = it.next().expect("bug: less than 3 vertices around face");
let vc = it.next().expect("bug: less than 3 vertices around face");
// Make sure this is a triangle face. Note: we do not check
// `mesh.is_tri_mesh()` in the beginning, as we also want to be
// able to serialize triangle meshes whose type does not implement
// `TriMesh`. We only want to error if there is actually a non-tri
// face.
if it.next().is_some() {
return Err(Error::new(|| ErrorKind::StreamSinkDoesNotSupportPolygonFaces));
}
// Get positions from map and convert them to array
let get_v = |vh| -> Result<[f32; 3], Error> {
src.vertex_position::<f32>(vh)
.and_then(|opt| {
opt.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: format!("no position for {:?} while writing STL", vh),
}))
})
.map(|p| p.convert()) // to array form
};
let vertices = [get_v(va)?, get_v(vb)?, get_v(vc)?];
let normal = if has_normals {
src.face_normal::<f32>(fh)?
.ok_or_else(|| Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::FaceNormal,
msg: format!("no normal for {:?} while writing STL", fh),
}))?
.convert() // to array form
} else {
calc_normal(&vertices)
};
Ok(RawTriangle {
vertices,
normal,
// As Wikipedia beautifully put it: "this should be zero
// because most software does not understand anything else."
// Great. Some people abuse this to store color or other
// information. This is terrible, we won't do that.
attribute_byte_count: 0,
})
});
let face_count = mesh.num_faces().try_into().map_err(|_| {
Error::new(|| ErrorKind::SinkIncompatible(
"STL only supports 2^32 triangles, but mesh contains more faces".into()
))
})?;
self.write_raw(face_count, triangles)
}
}
// ===============================================================================================
// ===== Helper functions
// ===============================================================================================
/// Calculates the normal of the face defined by the three vertices in CCW order.
fn calc_normal(positions: &[[f32; 3]; 3]) -> [f32; 3] {
let pos_a = positions[0].to_point3();
let pos_b = positions[1].to_point3();
let pos_c = positions[2].to_point3();
let normal = (pos_b - pos_a).cross(pos_c - pos_a).normalize();
[normal.x, normal.y, normal.z]
}
// ===============================================================================================
// ===== Functions for body writing
// ===============================================================================================
/// Writes the three values of the given vector (in STL ASCII encoding,
/// separated
/// by ' ') into the writer.
fn write_ascii_vector(w: &mut impl Write, [x, y, z]: [f32; 3]) -> Result<(), io::Error> {
write_ascii_f32(w, x)?;
write!(w, " ")?;
write_ascii_f32(w, y)?;
write!(w, " ")?;
write_ascii_f32(w, z)?;
Ok(())
}
/// Writes the given `f32` in STL ASCII format into the given writer.
///
/// The STL specification is terribly underspecified. The only information
/// about how to encode floats in ASCII is this:
///
/// > The numerical data in the facet normal and vertex lines are single
/// > precision floats, for example, 1.23456E+789. A facet normal coordinate
/// > may have a leading minus sign; a vertex coordinate may not.
///
/// I don't think the last sentence makes any sense: why forbid negative
/// coordinates? In any case, no one in the real world cares about that: there
/// are plenty of STL files out there with negative vertex coordinates.
///
/// About the actual format: clearly unhelpful. In the real world, STL file floats
/// are encoded all over the place. I've seen `1`, `1.2`, `10.2`, `1.02e1`,
/// `1.020000E+001` and more. We just stick to the exact format mentioned in
/// the "specification". This does not necessarily make any sense and wastes
/// memory, but so does ASCII STL. Just don't use the ASCII STL format!
fn write_ascii_f32(w: &mut impl Write, v: f32) -> Result<(), io::Error> {
use std::num::FpCategory;
match v.classify() {
FpCategory::Normal | FpCategory::Subnormal => {
let exponent = v.abs().log10().floor();
let mantissa = v / 10f32.powf(exponent);
write!(w, "{}E{:+}", mantissa, exponent)
}
_ => {
// `v` is either infinite, `NaN` or zero. We want to serialize
// the zeroes as `0.0`.
write!(w, "{:.1}", v)
}
}
}
| {
return Err(Error::new(|| ErrorKind::DataIncomplete {
prop: PropKind::VertexPosition,
msg: "source does not provide vertex positions, but STL requires them".into(),
}));
} | conditional_block |
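// ---------------------------------------------------------------------------
// Worked example for write_ascii_f32 above: 12.5 has floor(log10(12.5)) = 1,
// so mantissa = 12.5 / 10^1 = 1.25 and the output is "1.25E+1"; zero takes
// the fallback branch and prints "0.0". The exact strings are illustrative
// and depend on Rust's default float formatting.
fn ascii_f32_demo() -> Result<(), io::Error> {
    let mut buf = Vec::new();
    write_ascii_f32(&mut buf, 12.5)?;
    buf.push(b' ');
    write_ascii_f32(&mut buf, 0.0)?;
    assert_eq!(String::from_utf8(buf).unwrap(), "1.25E+1 0.0");
    Ok(())
}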
parse.rs | use std::iter::FromIterator;
use std::ops::{Index, Range, RangeFull};
use std::str::FromStr;
use crate::repr::{AnchorLocation, Pattern, Repetition};
/// The entry point for this module: Parse a string into a `Pattern` that can be optimized and/or
/// compiled.
pub fn parse(s: &str) -> Result<Pattern, String> {
let src: Vec<char> = s.chars().collect();
parse_re(ParseState::new(&src)).map(|t| t.0)
}
/// ParseStack contains already parsed elements of a regular expression, and is used for parsing
/// textual regular expressions (as the parsing algorithm is stack-based). It can be converted to
/// a Pattern.
struct ParseStack {
s: Vec<Pattern>,
}
impl ParseStack {
fn new() -> ParseStack {
ParseStack {
s: Vec::with_capacity(4),
}
}
fn push(&mut self, p: Pattern) {
self.s.push(p)
}
fn pop(&mut self) -> Option<Pattern> {
self.s.pop()
}
fn empty(&self) -> bool {
self.s.is_empty()
}
fn to_pattern(mut self) -> Pattern {
if self.s.len() > 1 {
Pattern::Concat(self.s)
} else if self.s.len() == 1 {
self.s.pop().unwrap()
} else {
panic!("empty stack")
}
}
}
/// State of the parser, quite a simple struct. It contains the current substring that a parser
/// function is concerned with as well as the position within the overall parsed string, so that
/// useful positions can be reported to users. In addition, it provides functions to cheaply create
/// "sub-ParseStates" containing a substring of its current string.
///
/// It also supports indexing by ranges and index.
struct ParseState<'a> {
/// The string to parse. This may be a substring of the "overall" matched string.
src: &'a [char],
/// The position within the overall string (for error reporting).
pos: usize,
}
impl<'a> ParseState<'a> {
/// new returns a new ParseState operating on the specified input string.
fn new(s: &'a [char]) -> ParseState<'a> {
ParseState { src: s, pos: 0 }
}
/// from returns a new ParseState operating on the [from..] sub-string of the current
/// ParseState.
fn from(&self, from: usize) -> ParseState<'a> {
self.sub(from, self.len())
}
/// pos returns the overall position within the input regex.
fn pos(&self) -> usize {
self.pos
}
/// sub returns a sub-ParseState containing [from..to] of the current one.
fn sub(&self, from: usize, to: usize) -> ParseState<'a> {
ParseState {
src: &self.src[from..to],
pos: self.pos + from,
}
}
/// len returns how many characters this ParseState contains.
fn len(&self) -> usize {
self.src.len()
}
/// err returns a formatted error string containing the specified message and the overall
/// position within the original input string.
fn err<T>(&self, s: &str, i: usize) -> Result<T, String> {
Err(format!("{} at :{}", s, self.pos + i))
}
}
impl<'a> Index<Range<usize>> for ParseState<'a> {
type Output = [char];
fn index(&self, r: Range<usize>) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<RangeFull> for ParseState<'a> {
type Output = [char];
fn index(&self, r: RangeFull) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<usize> for ParseState<'a> {
type Output = char;
fn index(&self, i: usize) -> &Self::Output {
&self.src[i]
}
}
impl<'a> Clone for ParseState<'a> {
fn clone(&self) -> ParseState<'a> {
ParseState {
src: self.src,
pos: self.pos,
}
}
}
/// parse_re is the parser entry point; like all parser functions, it returns either a pair of
/// (parsed pattern, new ParseState) or an error string.
fn parse_re<'a>(mut s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
// The stack assists us in parsing the linear parts of a regular expression, e.g. non-pattern
// characters, or character sets.
let mut stack = ParseStack::new();
loop {
if s.len() == 0 {
break;
}
match s[0] {
'.' => {
stack.push(Pattern::Any);
s = s.from(1);
}
'$' => {
if s.len() == 1 {
stack.push(Pattern::Anchor(AnchorLocation::End));
} else {
stack.push(Pattern::Char('$'))
}
s = s.from(1);
}
'^' => {
if s.pos() == 0 {
stack.push(Pattern::Anchor(AnchorLocation::Begin));
} else {
stack.push(Pattern::Char('^'));
}
s = s.from(1);
}
r @ '+' | r @ '*' | r @ '?' => {
if let Some(p) = stack.pop() {
let rep = match r {
'+' => Repetition::OnceOrMore(p),
'*' => Repetition::ZeroOrMore(p),
'?' => Repetition::ZeroOrOnce(p),
_ => unimplemented!(),
};
stack.push(Pattern::Repeated(Box::new(rep)));
s = s.from(1);
} else {
return s.err("+ without pattern to repeat", 0);
}
}
// Alternation: Parse the expression on the right of the pipe sign and push an
// alternation between what we've already seen and the stuff on the right.
'|' => {
let (rest, newst) = parse_re(s.from(1))?;
let left = stack.to_pattern();
stack = ParseStack::new();
stack.push(Pattern::Alternate(vec![left, rest]));
s = newst;
}
'(' => {
match split_in_parens(s.clone(), ROUND_PARENS) {
Some((parens, newst)) => {
// Parse the sub-regex within parentheses.
let (pat, rest) = parse_re(parens)?;
assert!(rest.len() == 0);
stack.push(Pattern::Submatch(Box::new(pat)));
// Set the current state to contain the string after the parentheses.
s = newst;
}
None => return s.err("unmatched (", s.len()),
}
}
')' => return s.err("unopened ')'", 0),
'[' => match parse_char_set(s) {
Ok((pat, newst)) => {
stack.push(pat);
s = newst;
}
Err(e) => return Err(e),
},
']' => return s.err("unopened ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => {
if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst;
} else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
/// parse_char_set parses the character set at the start of the input state.
/// Valid sets are [a], [ab], [a-z], [-a-z], [a-z-] and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
for p in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(p);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0.is_empty() && !p1.is_empty() {
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if p0.is_empty() && !p1.is_empty() {
// {,3}
let min = 0;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if !p0.is_empty() && p1.is_empty() {
// {3,}
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let repetition =
Pattern::Repeated(Box::new(Repetition::Specific(p.clone(), min, None)));
return Ok(Pattern::Concat(vec![
repetition,
Pattern::Repeated(Box::new(Repetition::ZeroOrMore(p))),
]));
}
}
Err(format!("invalid repetition pattern {:?}", &rep[..]))
}
/// Constants for generalizing parsing of parentheses.
const ROUND_PARENS: (char, char) = ('(', ')');
/// Constants for generalizing parsing of parentheses.
const SQUARE_BRACKETS: (char, char) = ('[', ']');
/// Constants for generalizing parsing of parentheses.
const CURLY_BRACKETS: (char, char) = ('{', '}');
/// split_in_parens returns two new ParseStates; the first one containing the contents of the
/// parenthesized clause starting at s[0], the second one containing the rest.
fn split_in_parens<'a>(
s: ParseState<'a>,
parens: (char, char),
) -> Option<(ParseState<'a>, ParseState<'a>)> {
if let Some(end) = find_closing_paren(s.clone(), parens) {
Some((s.sub(1, end), s.from(end + 1)))
} else {
None
}
}
/// find_closing_paren returns the index of the parenthesis closing the opening parenthesis at the
/// beginning of the state's string.
fn find_closing_paren<'a>(s: ParseState<'a>, parens: (char, char)) -> Option<usize> {
if s[0] != parens.0 {
return None;
}
let mut count = 0;
for i in 0..s.len() {
if s[i] == parens.0 {
count += 1;
} else if s[i] == parens.1 {
count -= 1;
}
if count == 0 {
return Some(i);
}
}
None
}
#[cfg(test)]
mod tests {
use super::*;
use crate::compile::*;
use crate::repr::*;
use crate::state::dot;
#[test]
fn test_find_closing_paren() {
for case in &[
("(abc)de", Some(4)),
("()a", Some(1)),
("(abcd)", Some(5)),
("(abc", None),
] {
let src: Vec<char> = case.0.chars().collect();
assert_eq!(
find_closing_paren(ParseState::new(src.as_ref()), ROUND_PARENS),
case.1
);
}
}
#[test]
fn test_parse_charset() {
for case in &[
("[a]", Pattern::Char('a')),
("[ab]", Pattern::CharSet(vec!['a', 'b'])),
("[ba-]", Pattern::CharSet(vec!['b', 'a', '-'])),
("[a-z]", Pattern::CharRange('a', 'z')),
(
"[a-z-]",
Pattern::Alternate(vec![Pattern::CharRange('a', 'z'), Pattern::Char('-')]),
),
(
"[-a-z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharSet(vec!['-', '-']),
]),
),
(
"[a-zA-Z]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
]),
),
(
"[a-zA-Z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
Pattern::Char('-'),
]),
),
] {
let src: Vec<char> = case.0.chars().collect();
let st = ParseState::new(&src);
assert_eq!(parse_char_set(st).unwrap().0, case.1);
}
}
#[test]
fn test_parse_subs() {
let case1 = (
"a(b)c",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Char('b'))),
Pattern::Char('c'),
]),
);
let case2 = ("(b)", Pattern::Submatch(Box::new(Pattern::Char('b'))));
for c in &[case1, case2] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res() {
let case1 = (
"a(Bcd)e",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Concat(vec![
Pattern::Char('B'),
Pattern::Char('c'),
Pattern::Char('d'),
]))),
Pattern::Char('e'),
]),
);
for c in &[case1] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res_errors() {
let case1 = ("ac)d", "unopened ')' at :2");
let case2 = ("(ac)d)", "unopened ')' at :5");
let case3 = ("[ac]d]", "unopened ']' at :5");
let case4 = ("(ac)d]", "unopened ']' at :5");
for c in &[case1, case2, case3, case4] {
assert_eq!(c.1, parse(c.0).unwrap_err());
}
}
#[test]
fn test_parse_repetition_manual() {
println!(
"digraph st {{ {} }}",
dot(&start_compile(&parse("[abc]{1,5}"). | {
let case1 = (
"a(b)c",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Char('b'))),
Pattern::Char('c'),
]),
);
let case2 = ("(b)", Pattern::Submatch(Box::new(Pattern::Char('b'))));
for c in &[case1, case2] {
assert_eq!(c.1, parse(c.0).unwrap());
}
} | identifier_body |
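// ---------------------------------------------------------------------------
// A short usage sketch for the parse() entry point above, checking the
// Pattern shape produced for a simple regex. The Pattern variants are the
// ones from crate::repr used throughout this file.
fn parse_usage() {
    let pat = parse("a[0-9]+").unwrap();
    // Expected: Concat([Char('a'), Repeated(OnceOrMore(CharRange('0', '9')))])
    match pat {
        Pattern::Concat(parts) => {
            assert_eq!(parts[0], Pattern::Char('a'));
            assert!(matches!(parts[1], Pattern::Repeated(_)));
        }
        other => panic!("unexpected pattern: {:?}", other),
    }
}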
parse.rs | str::FromStr;
use crate::repr::{AnchorLocation, Pattern, Repetition};
/// The entry point for this module: Parse a string into a `Pattern` that can be optimized and/or
/// compiled.
pub fn parse(s: &str) -> Result<Pattern, String> {
let src: Vec<char> = s.chars().collect();
parse_re(ParseState::new(&src)).map(|t| t.0)
}
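// Hedged usage note (editor's addition): `parse` is the whole public surface;
// for example, parse("a|b") returns
// Ok(Pattern::Alternate(vec![Pattern::Char('a'), Pattern::Char('b')])).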
/// ParseStack contains already parsed elements of a regular expression, and is used for parsing
/// textual regular expressions (as the parsing algorithm is stack-based). It can be converted to
/// a Pattern.
struct ParseStack {
s: Vec<Pattern>,
}
impl ParseStack {
fn new() -> ParseStack {
ParseStack {
s: Vec::with_capacity(4),
}
}
fn push(&mut self, p: Pattern) {
self.s.push(p)
}
fn pop(&mut self) -> Option<Pattern> {
self.s.pop()
}
fn empty(&self) -> bool {
self.s.is_empty()
}
fn to_pattern(mut self) -> Pattern {
if self.s.len() > 1 {
Pattern::Concat(self.s)
} else if self.s.len() == 1 {
self.s.pop().unwrap()
} else {
panic!("empty stack")
}
}
}
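// Illustrative sketch (editor's addition, not in the original file): the stack
// collapses to a concatenation only when it holds more than one element.
//
//     let mut st = ParseStack::new();
//     st.push(Pattern::Char('a'));
//     st.push(Pattern::Char('b'));
//     assert_eq!(
//         st.to_pattern(),
//         Pattern::Concat(vec![Pattern::Char('a'), Pattern::Char('b')])
//     );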
/// State of the parser, quite a simple struct. It contains the current substring that a parser
/// function is concerned with as well as the position within the overall parsed string, so that
/// useful positions can be reported to users. In addition, it provides functions to cheaply create
/// "sub-ParseStates" containing a substring of its current string.
///
/// It also supports indexing by ranges and index.
struct ParseState<'a> {
/// The string to parse. This may be a substring of the "overall" matched string.
src: &'a [char],
/// The position within the overall string (for error reporting).
pos: usize,
}
impl<'a> ParseState<'a> {
/// new returns a new ParseState operating on the specified input string.
fn new(s: &'a [char]) -> ParseState<'a> {
ParseState { src: s, pos: 0 }
}
/// from returns a new ParseState operating on the [from..] sub-string of the current
/// ParseState.
fn from(&self, from: usize) -> ParseState<'a> {
self.sub(from, self.len())
}
/// pos returns the overall position within the input regex.
fn pos(&self) -> usize {
self.pos
}
/// sub returns a sub-ParseState containing [from..to] of the current one.
fn sub(&self, from: usize, to: usize) -> ParseState<'a> {
ParseState {
src: &self.src[from..to],
pos: self.pos + from,
}
}
/// len returns how many characters this ParseState contains.
fn len(&self) -> usize {
self.src.len()
}
/// err returns a formatted error string containing the specified message and the overall
/// position within the original input string.
fn err<T>(&self, s: &str, i: usize) -> Result<T, String> {
Err(format!("{} at :{}", s, self.pos + i))
}
}
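// Worked example (editor's addition): for the input "ab(cd)", `s.from(3)`
// yields a ParseState over "cd)" whose pos() is 3, so messages built with
// `err` keep reporting offsets into the original regex rather than into the
// current sub-slice.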
impl<'a> Index<Range<usize>> for ParseState<'a> {
type Output = [char];
fn index(&self, r: Range<usize>) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<RangeFull> for ParseState<'a> {
type Output = [char];
fn index(&self, r: RangeFull) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<usize> for ParseState<'a> {
type Output = char;
fn index(&self, i: usize) -> &Self::Output {
&self.src[i]
}
}
impl<'a> Clone for ParseState<'a> {
fn clone(&self) -> ParseState<'a> {
ParseState {
src: self.src,
pos: self.pos,
}
}
}
/// parse_re is the parser entry point; like all parser functions, it returns either a pair of
/// (parsed pattern, new ParseState) or an error string.
fn parse_re<'a>(mut s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
// The stack assists us in parsing the linear parts of a regular expression, e.g. non-pattern
// characters, or character sets.
let mut stack = ParseStack::new();
loop {
if s.len() == 0 {
break;
}
match s[0] {
'.' => {
stack.push(Pattern::Any);
s = s.from(1);
}
'$' => {
if s.len() == 1 {
stack.push(Pattern::Anchor(AnchorLocation::End));
} else {
stack.push(Pattern::Char('$'))
}
s = s.from(1);
}
'^' => {
if s.pos() == 0 {
stack.push(Pattern::Anchor(AnchorLocation::Begin));
} else {
stack.push(Pattern::Char('^'));
}
s = s.from(1);
}
r @ '+' | r @ '*' | r @ '?' => {
if let Some(p) = stack.pop() {
let rep = match r {
'+' => Repetition::OnceOrMore(p),
'*' => Repetition::ZeroOrMore(p),
'?' => Repetition::ZeroOrOnce(p),
_ => unimplemented!(),
};
stack.push(Pattern::Repeated(Box::new(rep)));
s = s.from(1);
} else {
return s.err("+ without pattern to repeat", 0);
}
}
// Alternation: Parse the expression on the right of the pipe sign and push an
// alternation between what we've already seen and the stuff on the right.
'|' => {
let (rest, newst) = parse_re(s.from(1))?;
let left = stack.to_pattern();
stack = ParseStack::new();
stack.push(Pattern::Alternate(vec![left, rest]));
s = newst;
}
'(' => {
match split_in_parens(s.clone(), ROUND_PARENS) {
Some((parens, newst)) => {
// Parse the sub-regex within parentheses.
let (pat, rest) = parse_re(parens)?;
assert!(rest.len() == 0);
stack.push(Pattern::Submatch(Box::new(pat)));
// Set the current state to contain the string after the parentheses.
s = newst;
}
None => return s.err("unmatched (", s.len()),
}
}
')' => return s.err("unopened ')'", 0),
'[' => match parse_char_set(s) {
Ok((pat, newst)) => {
stack.push(pat);
s = newst;
}
Err(e) => return Err(e),
},
']' => return s.err("unopened ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => { | } else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
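// Editor's trace (illustrative only): parsing "a|bc" with parse_re:
//   1. 'a' -> stack = [Char('a')]
//   2. '|' -> recurse on "bc", which yields Concat([Char('b'), Char('c')]);
//      the current stack collapses via to_pattern() into Char('a'), and
//      Alternate([Char('a'), Concat([...])]) is pushed.
// Because the recursion consumes the whole remainder, alternation binds
// loosest and is effectively right-associative.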
/// parse_char_set parses the character set at the start of the input state.
/// Valid forms are [a], [ab], [a-z], [-a-z], [a-z-], and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
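// Editor's note: a dash only forms a range when it sits in the middle of a
// three-character window, so leading and trailing dashes fall through to the
// plain-character branch. Hence "[-a-z-]" parses as
// Alternate([CharRange('a', 'z'), CharSet(['-', '-'])]) -- both dashes land in
// the set, as the test_parse_charset case further down demonstrates.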
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
for p in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(p);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0.is_empty() && !p1.is_empty() {
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if p0.is_empty() && !p1.is_empty() {
// {,3}
let min = 0;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if !p0.is_empty() && p1.is_empty() {
// {3,}
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let repetition =
Pattern::Repeated(Box::new(Repetition::Specific(p.clone(), min, None)));
return Ok(Pattern::Concat(vec![
repetition,
Pattern::Repeated(Box::new(Repetition::ZeroOrMore(p))),
]));
}
}
Err(format!("invalid repetition pattern {:?}", &rep[..]))
}
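// Hedged summary (editor's addition), assuming the Repetition variants above:
//   "x{3}"   -> Specific(x, 3, None)      // exactly three
//   "x{,3}"  -> Specific(x, 0, Some(3))   // at most three
//   "x{2,3}" -> Specific(x, 2, Some(3))   // bounded range
//   "x{3,}"  -> Concat([Specific(x, 3, None), ZeroOrMore(x)])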
/// Constants for generalizing parsing of parentheses.
const ROUND_PARENS: (char, char) = ('(', ')');
/// Constants for generalizing parsing of parentheses.
const SQUARE_BRACKETS: (char, char) = ('[', ']');
/// Constants for generalizing parsing of parentheses.
const CURLY_BRACKETS: (char, char) = ('{', '}');
/// split_in_parens returns two new ParseStates; the first one containing the contents of the
/// parenthesized clause starting at s[0], the second one containing the rest.
fn split_in_parens<'a>(
s: ParseState<'a>,
parens: (char, char),
) -> Option<(ParseState<'a>, ParseState<'a>)> {
if let Some(end) = find_closing_paren(s.clone(), parens) {
Some((s.sub(1, end), s.from(end + 1)))
} else {
None
}
}
/// find_closing_paren returns the index of the parenthesis closing the opening parenthesis at the
/// beginning of the state's string.
fn find_closing_paren<'a>(s: ParseState<'a>, parens: (char, char)) -> Option<usize> {
if s[0] != parens.0 {
return None;
}
let mut count = 0;
for i in 0..s.len() {
if s[i] == parens.0 {
count += 1;
} else if s[i] == parens.1 {
count -= 1;
}
if count == 0 {
return Some(i);
}
}
None
}
#[cfg(test)]
mod tests {
use super::*;
use crate::compile::*;
use crate::repr::*;
use crate::state::dot;
#[test]
fn test_find_closing_paren() {
for case in &[
("(abc)de", Some(4)),
("()a", Some(1)),
("(abcd)", Some(5)),
("(abc", None),
] {
let src: Vec<char> = case.0.chars().collect();
assert_eq!(
find_closing_paren(ParseState::new(src.as_ref()), ROUND_PARENS),
case.1
);
}
}
#[test]
fn test_parse_charset() {
for case in &[
("[a]", Pattern::Char('a')),
("[ab]", Pattern::CharSet(vec!['a', 'b'])),
("[ba-]", Pattern::CharSet(vec!['b', 'a', '-'])),
("[a-z]", Pattern::CharRange('a', 'z')),
(
"[a-z-]",
Pattern::Alternate(vec![Pattern::CharRange('a', 'z'), Pattern::Char('-')]),
),
(
"[-a-z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharSet(vec!['-', '-']),
]),
),
(
"[a-zA-Z]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
]),
),
(
"[a-zA-Z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
Pattern::Char('-'),
]),
),
] {
let src: Vec<char> = case.0.chars().collect();
let st = ParseState::new(&src);
assert_eq!(parse_char_set(st).unwrap().0, case.1);
}
}
#[test]
fn test_parse_subs() {
let case1 = (
"a(b)c",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Char('b'))),
Pattern::Char('c'),
]),
);
let case2 = ("(b)", Pattern::Submatch(Box::new(Pattern::Char('b'))));
for c in &[case1, case2] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res() {
let case1 = (
"a(Bcd)e",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Concat(vec![
Pattern::Char('B'),
Pattern::Char('c'),
Pattern::Char('d'),
]))),
Pattern::Char('e'),
]),
);
for c in &[case1] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res_errors() {
let case1 = ("ac)d", "unopened ')' at :2");
let case2 = ("(ac)d)", "unopened ')' at :5");
let case3 = ("[ac]d]", "unopened ']' at :5");
let case4 = ("(ac)d]", "unopened ']' at :5");
for c in &[case1, case2, case3, case4] {
assert_eq!(c.1, parse(c.0).unwrap_err());
}
}
#[test]
fn test_parse_repetition_manual() {
println!(
"digraph st {{ {} }}",
dot(&start_compile(&parse("[abc]{1,5}").unwrap())) | if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst; | random_line_split |
parse.rs | ::FromStr;
use crate::repr::{AnchorLocation, Pattern, Repetition};
/// The entry point for this module: Parse a string into a `Pattern` that can be optimized and/or
/// compiled.
pub fn parse(s: &str) -> Result<Pattern, String> {
let src: Vec<char> = s.chars().collect();
parse_re(ParseState::new(&src)).map(|t| t.0)
}
/// ParseStack contains already parsed elements of a regular expression, and is used for parsing
/// textual regular expressions (as the parsing algorithm is stack-based). It can be converted to
/// a Pattern.
struct ParseStack {
s: Vec<Pattern>,
}
impl ParseStack {
fn new() -> ParseStack {
ParseStack {
s: Vec::with_capacity(4),
}
}
fn push(&mut self, p: Pattern) {
self.s.push(p)
}
fn pop(&mut self) -> Option<Pattern> {
self.s.pop()
}
fn empty(&self) -> bool {
self.s.is_empty()
}
fn to_pattern(mut self) -> Pattern {
if self.s.len() > 1 {
Pattern::Concat(self.s)
} else if self.s.len() == 1 {
self.s.pop().unwrap()
} else {
panic!("empty stack")
}
}
}
/// State of the parser, quite a simple struct. It contains the current substring that a parser
/// function is concerned with as well as the position within the overall parsed string, so that
/// useful positions can be reported to users. In addition, it provides functions to cheaply create
/// "sub-ParseStates" containing a substring of its current string.
///
/// It also supports indexing by ranges and index.
struct | <'a> {
/// The string to parse. This may be a substring of the "overall" matched string.
src: &'a [char],
/// The position within the overall string (for error reporting).
pos: usize,
}
impl<'a> ParseState<'a> {
/// new returns a new ParseState operating on the specified input string.
fn new(s: &'a [char]) -> ParseState<'a> {
ParseState { src: s, pos: 0 }
}
/// from returns a new ParseState operating on the [from..] sub-string of the current
/// ParseState.
fn from(&self, from: usize) -> ParseState<'a> {
self.sub(from, self.len())
}
/// pos returns the overall position within the input regex.
fn pos(&self) -> usize {
self.pos
}
/// sub returns a sub-ParseState containing [from..to] of the current one.
fn sub(&self, from: usize, to: usize) -> ParseState<'a> {
ParseState {
src: &self.src[from..to],
pos: self.pos + from,
}
}
/// len returns how many characters this ParseState contains.
fn len(&self) -> usize {
self.src.len()
}
/// err returns a formatted error string containing the specified message and the overall
/// position within the original input string.
fn err<T>(&self, s: &str, i: usize) -> Result<T, String> {
Err(format!("{} at :{}", s, self.pos + i))
}
}
impl<'a> Index<Range<usize>> for ParseState<'a> {
type Output = [char];
fn index(&self, r: Range<usize>) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<RangeFull> for ParseState<'a> {
type Output = [char];
fn index(&self, r: RangeFull) -> &Self::Output {
&self.src[r]
}
}
impl<'a> Index<usize> for ParseState<'a> {
type Output = char;
fn index(&self, i: usize) -> &Self::Output {
&self.src[i]
}
}
impl<'a> Clone for ParseState<'a> {
fn clone(&self) -> ParseState<'a> {
ParseState {
src: self.src,
pos: self.pos,
}
}
}
/// parse_re is the parser entry point; like all parser functions, it returns either a pair of
/// (parsed pattern, new ParseState) or an error string.
fn parse_re<'a>(mut s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
// The stack assists us in parsing the linear parts of a regular expression, e.g. non-pattern
// characters, or character sets.
let mut stack = ParseStack::new();
loop {
if s.len() == 0 {
break;
}
match s[0] {
'.' => {
stack.push(Pattern::Any);
s = s.from(1);
}
'$' => {
if s.len() == 1 {
stack.push(Pattern::Anchor(AnchorLocation::End));
} else {
stack.push(Pattern::Char('$'))
}
s = s.from(1);
}
'^' => {
if s.pos() == 0 {
stack.push(Pattern::Anchor(AnchorLocation::Begin));
} else {
stack.push(Pattern::Char('^'));
}
s = s.from(1);
}
r @ '+' | r @ '*' | r @ '?' => {
if let Some(p) = stack.pop() {
let rep = match r {
'+' => Repetition::OnceOrMore(p),
'*' => Repetition::ZeroOrMore(p),
'?' => Repetition::ZeroOrOnce(p),
_ => unimplemented!(),
};
stack.push(Pattern::Repeated(Box::new(rep)));
s = s.from(1);
} else {
return s.err("+ without pattern to repeat", 0);
}
}
// Alternation: Parse the expression on the right of the pipe sign and push an
// alternation between what we've already seen and the stuff on the right.
'|' => {
let (rest, newst) = parse_re(s.from(1))?;
let left = stack.to_pattern();
stack = ParseStack::new();
stack.push(Pattern::Alternate(vec![left, rest]));
s = newst;
}
'(' => {
match split_in_parens(s.clone(), ROUND_PARENS) {
Some((parens, newst)) => {
// Parse the sub-regex within parentheses.
let (pat, rest) = parse_re(parens)?;
assert!(rest.len() == 0);
stack.push(Pattern::Submatch(Box::new(pat)));
// Set the current state to contain the string after the parentheses.
s = newst;
}
None => return s.err("unmatched (", s.len()),
}
}
')' => return s.err("unopened ')'", 0),
'[' => match parse_char_set(s) {
Ok((pat, newst)) => {
stack.push(pat);
s = newst;
}
Err(e) => return Err(e),
},
']' => return s.err("unopened ']'", 0),
'{' => {
match split_in_parens(s.clone(), CURLY_BRACKETS) {
Some((rep, newst)) => {
if let Some(p) = stack.pop() {
let rep = parse_specific_repetition(rep, p)?;
stack.push(rep);
s = newst;
} else {
return s.err("repetition {} without pattern to repeat", 0);
}
}
None => return s.err("unmatched {", s.len()),
};
}
c => {
stack.push(Pattern::Char(c));
s = s.from(1);
}
}
}
Ok((stack.to_pattern(), s))
}
/// parse_char_set parses the character set at the start of the input state.
/// Valid forms are [a], [ab], [a-z], [-a-z], [a-z-], and [a-fh-kl].
fn parse_char_set<'a>(s: ParseState<'a>) -> Result<(Pattern, ParseState<'a>), String> {
if let Some((cs, rest)) = split_in_parens(s.clone(), SQUARE_BRACKETS) {
let mut chars: Vec<char> = vec![];
let mut ranges: Vec<Pattern> = vec![];
let mut st = cs;
loop {
// Try to match a range "a-z" by looking for the dash; if no dash, add character to set
// and advance.
if st.len() >= 3 && st[1] == '-' {
ranges.push(Pattern::CharRange(st[0], st[2]));
st = st.from(3);
} else if st.len() > 0 {
chars.push(st[0]);
st = st.from(1);
} else {
break;
}
}
assert_eq!(st.len(), 0);
if chars.len() == 1 {
ranges.push(Pattern::Char(chars.pop().unwrap()));
} else if !chars.is_empty() {
ranges.push(Pattern::CharSet(chars));
}
if ranges.len() == 1 {
Ok((ranges.pop().unwrap(), rest))
} else {
let pat = Pattern::Alternate(ranges);
Ok((pat, rest))
}
} else {
s.err("unmatched [", s.len())
}
}
/// Parse a repetition spec inside curly braces: {1} | {1,} | {,1} | {1,2}
fn parse_specific_repetition<'a>(rep: ParseState<'a>, p: Pattern) -> Result<Pattern, String> {
let mut nparts = 0;
let mut parts: [Option<&[char]>; 2] = Default::default();
for p in rep[..].split(|c| *c == ',') {
parts[nparts] = Some(p);
nparts += 1;
if nparts == 2 {
break;
}
}
if nparts == 0 {
// {}
return rep.err("empty {} spec", 0);
} else if nparts == 1 {
// {1}
if let Ok(n) = u32::from_str(&String::from_iter(parts[0].unwrap().iter())) {
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p, n, None,
))));
} else {
return Err(format!(
"invalid repetition '{}'",
String::from_iter(rep[..].iter())
));
}
} else if nparts == 2 {
fn errtostr(r: Result<u32, std::num::ParseIntError>) -> Result<u32, String> {
match r {
Ok(u) => Ok(u),
Err(e) => Err(format!("{}", e)),
}
}
let (p0, p1) = (parts[0].unwrap(), parts[1].unwrap());
// {2,3}
if !p0.is_empty() && !p1.is_empty() {
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if p0.is_empty() && !p1.is_empty() {
// {,3}
let min = 0;
let max = errtostr(u32::from_str(&String::from_iter(p1.iter())))?;
return Ok(Pattern::Repeated(Box::new(Repetition::Specific(
p,
min,
Some(max),
))));
} else if !p0.is_empty() && p1.is_empty() {
// {3,}
let min = errtostr(u32::from_str(&String::from_iter(p0.iter())))?;
let repetition =
Pattern::Repeated(Box::new(Repetition::Specific(p.clone(), min, None)));
return Ok(Pattern::Concat(vec![
repetition,
Pattern::Repeated(Box::new(Repetition::ZeroOrMore(p))),
]));
}
}
Err(format!("invalid repetition pattern {:?}", &rep[..]))
}
/// Constants for generalizing parsing of parentheses.
const ROUND_PARENS: (char, char) = ('(', ')');
/// Constants for generalizing parsing of parentheses.
const SQUARE_BRACKETS: (char, char) = ('[', ']');
/// Constants for generalizing parsing of parentheses.
const CURLY_BRACKETS: (char, char) = ('{', '}');
/// split_in_parens returns two new ParseStates; the first one containing the contents of the
/// parenthesized clause starting at s[0], the second one containing the rest.
fn split_in_parens<'a>(
s: ParseState<'a>,
parens: (char, char),
) -> Option<(ParseState<'a>, ParseState<'a>)> {
if let Some(end) = find_closing_paren(s.clone(), parens) {
Some((s.sub(1, end), s.from(end + 1)))
} else {
None
}
}
/// find_closing_paren returns the index of the parenthesis closing the opening parenthesis at the
/// beginning of the state's string.
fn find_closing_paren<'a>(s: ParseState<'a>, parens: (char, char)) -> Option<usize> {
if s[0] != parens.0 {
return None;
}
let mut count = 0;
for i in 0..s.len() {
if s[i] == parens.0 {
count += 1;
} else if s[i] == parens.1 {
count -= 1;
}
if count == 0 {
return Some(i);
}
}
None
}
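// Editor's note (illustrative): the depth counter handles nesting in a single
// pass: for "((a)b)c" it returns Some(5), the index of the ')' that brings the
// count back to zero, while the unbalanced "((a)" yields None. The scan does
// not special-case escaped parentheses.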
#[cfg(test)]
mod tests {
use super::*;
use crate::compile::*;
use crate::repr::*;
use crate::state::dot;
#[test]
fn test_find_closing_paren() {
for case in &[
("(abc)de", Some(4)),
("()a", Some(1)),
("(abcd)", Some(5)),
("(abc", None),
] {
let src: Vec<char> = case.0.chars().collect();
assert_eq!(
find_closing_paren(ParseState::new(src.as_ref()), ROUND_PARENS),
case.1
);
}
}
#[test]
fn test_parse_charset() {
for case in &[
("[a]", Pattern::Char('a')),
("[ab]", Pattern::CharSet(vec!['a', 'b'])),
("[ba-]", Pattern::CharSet(vec!['b', 'a', '-'])),
("[a-z]", Pattern::CharRange('a', 'z')),
(
"[a-z-]",
Pattern::Alternate(vec![Pattern::CharRange('a', 'z'), Pattern::Char('-')]),
),
(
"[-a-z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharSet(vec!['-', '-']),
]),
),
(
"[a-zA-Z]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
]),
),
(
"[a-zA-Z-]",
Pattern::Alternate(vec![
Pattern::CharRange('a', 'z'),
Pattern::CharRange('A', 'Z'),
Pattern::Char('-'),
]),
),
] {
let src: Vec<char> = case.0.chars().collect();
let st = ParseState::new(&src);
assert_eq!(parse_char_set(st).unwrap().0, case.1);
}
}
#[test]
fn test_parse_subs() {
let case1 = (
"a(b)c",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Char('b'))),
Pattern::Char('c'),
]),
);
let case2 = ("(b)", Pattern::Submatch(Box::new(Pattern::Char('b'))));
for c in &[case1, case2] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res() {
let case1 = (
"a(Bcd)e",
Pattern::Concat(vec![
Pattern::Char('a'),
Pattern::Submatch(Box::new(Pattern::Concat(vec![
Pattern::Char('B'),
Pattern::Char('c'),
Pattern::Char('d'),
]))),
Pattern::Char('e'),
]),
);
for c in &[case1] {
assert_eq!(c.1, parse(c.0).unwrap());
}
}
#[test]
fn test_parse_res_errors() {
let case1 = ("ac)d", "unopened ')' at :2");
let case2 = ("(ac)d)", "unopened ')' at :5");
let case3 = ("[ac]d]", "unopened ']' at :5");
let case4 = ("(ac)d]", "unopened ']' at :5");
for c in &[case1, case2, case3, case4] {
assert_eq!(c.1, parse(c.0).unwrap_err());
}
}
#[test]
fn test_parse_repetition_manual() {
println!(
"digraph st {{ {} }}",
dot(&start_compile(&parse("[abc]{1,5}"). | ParseState | identifier_name |
fib.rs | rev ref
paren: WeakNode<I, T>,
child: Node<I, T>,
/// Indicate that it has lost a child
marked: bool,
}
////////////////////////////////////////////////////////////////////////////////
//// Implementation
impl<I: Debug, T: Debug> Debug for Node_<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{:?}[{:?}]{}",
self.idx,
self.val,
if self.marked { " X" } else { "" }
)
}
}
impl_node!();
impl<I, T> Node<I, T> {
fn children(&self) -> Vec<Self> {
let mut child = child!(self);
let mut res = vec![];
while child.is_some() {
res.push(child.clone());
child = right!(child);
}
res
}
/// remove paren, left and right
fn purge_as_root(&self) {
paren!(self, WeakNode::none());
left!(self, WeakNode::none());
right!(self, Node::none());
}
fn cut_child(&self, x: Node<I, T>) {
if !left!(x).is_none() {
right!(left!(x).upgrade(), right!(x));
} else {
debug_assert!(child!(self).rc_eq(&x));
child!(self, right!(x));
}
if !right!(x).is_none() {
left!(right!(x), left!(x));
}
rank!(self, rank!(self) - 1);
x.purge_as_root();
}
/// replace with new val, return old val
fn replace_key(&self, val: T) -> T
where
I: Debug,
T: Debug
{
replace(val_mut!(self), val)
}
fn replace(&mut self, x: Self) -> Self {
let old = Self(self.0.clone());
self.0 = x.0;
old
}
#[cfg(test)]
#[allow(unused)]
fn validate_ref(&self)
where
I: Clone,
{
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn | (&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have the same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
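// Hedged usage sketch (editor's addition, not in the original file):
//
//     let mut h = FibHeap::new();
//     h.push("a", 3);
//     h.push("b", 1);
//     assert_eq!(h.pop_item(), Some(("b", 1))); // always the current minimum
//     assert_eq!(h.pop_item(), Some(("a", 3)));
//     assert_eq!(h.pop_item(), None);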
/// merge same-rank trees recursively
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
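// Hedged example (editor's addition): insert doubles as an update-key:
//
//     let mut h = FibHeap::new();
//     assert_eq!(h.insert("k", 10), None);    // plain push
//     assert_eq!(h.insert("k", 4), Some(10)); // same index: decrease-key path
//     assert_eq!(h.insert("k", 9), Some(4));  // same index: increase-key path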
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
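// Editor's sketch of the intended use (hypothetical names, Dijkstra-style
// relaxation; `heap`, `v`, and `cand` are the editor's inventions):
//
//     if heap.get(&v).map_or(true, |d| cand < *d) {
//         heap.decrease_key(v, cand); // returns None and does nothing if v is
//                                     // unknown; insert(v, cand) covers that case
//     }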
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent that meets the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// By definition roots are never marked, but that should not matter here; the mark gives a more precise picture of the tree shape after a pop-triggered consolidation
marked!(x, true);
}
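// Editor's illustration of the cascading cut above (not from the source): with
// marked ancestors g -> p -> x, where x was just handed to this function,
//
//     cut x, meld into roots, unmark x
//     cut p, meld into roots, unmark p      // p was marked and had a parent
//     stop at g (unmarked, or already a root)
//
// Bounding how many children a node may lose is what keeps node ranks
// logarithmic in subtree size.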
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate that nodes are not None and do not fail to upgrade to Rc
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
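// Runnable sanity check added by the editor (the module name is the editor's
// invention; it exercises only items defined in this file):
#[cfg(test)]
mod editor_smoke_tests {
    use super::*;

    #[test]
    fn decrease_key_updates_top() {
        let mut h = FibHeap::new();
        h.push('x', 50);
        h.push('y', 40);
        assert_eq!(h.top_item(), Some(('y', &40)));
        // Lowering x below y must surface x without an intervening pop.
        assert_eq!(h.decrease_key('x', 7), Some(50));
        assert_eq!(h.top_item(), Some(('x', &7)));
        assert_eq!(h.pop(), Some(7));
        assert_eq!(h.pop(), Some(40));
    }
}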
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursive call it
let mut childen_iter = x.children().into_iter();
if let Some(child) = childen_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in childen_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break circle dependency to enable drop
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert!(sib.is_some());
sib = right!(sib);
}
writeln!(f, "{}>> end <<{}", "-".repeat(28), "-".repeat(28))?;
Ok(())
}
}
impl<I: Ord + Hash + Clone + Debug, T: Ord + Clone + Debug> Clone for FibHeap<I, T> {
fn clone(&self) -> Self {
let len = self.len;
let rcnt = self.rcnt;
let mut nodes = HashMap::new();
let min;
let mut roots_iter | fmt | identifier_name |
fib.rs | rev ref
paren: WeakNode<I, T>,
child: Node<I, T>,
/// Indicate that it has lost a child
marked: bool,
}
////////////////////////////////////////////////////////////////////////////////
//// Implementation
impl<I: Debug, T: Debug> Debug for Node_<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{:?}[{:?}]{}",
self.idx,
self.val,
if self.marked { " X" } else { "" }
)
}
}
impl_node!();
impl<I, T> Node<I, T> {
fn children(&self) -> Vec<Self> {
let mut child = child!(self);
let mut res = vec![];
while child.is_some() {
res.push(child.clone());
child = right!(child);
}
res
}
/// remove paren, left and right
fn purge_as_root(&self) {
paren!(self, WeakNode::none());
left!(self, WeakNode::none());
right!(self, Node::none());
}
fn cut_child(&self, x: Node<I, T>) |
/// replace with new val, return old val
fn replace_key(&self, val: T) -> T
where
I: Debug,
T: Debug
{
replace(val_mut!(self), val)
}
fn replace(&mut self, x: Self) -> Self {
let old = Self(self.0.clone());
self.0 = x.0;
old
}
#[cfg(test)]
#[allow(unused)]
fn validate_ref(&self)
where
I: Clone,
{
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have the same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
/// merge same-rank trees recursively
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent that meets the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// By definition roots are never marked, but that should not matter here; the mark gives a more precise picture of the tree shape after a pop-triggered consolidation
marked!(x, true);
}
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
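// Editor's diagram of the circular root list maintained above (illustrative):
//
//     before:  min <-> r1 <-> ... <-> min        (doubly linked, wrapping)
//     after:   min <-> x  <-> r1 <-> ... <-> min
//
// x is always spliced in directly to the right of `min`. The `left` links are
// weak, which halves the strong-reference cycle; Drop still has to break the
// remaining strong `right` chain (see the Drop impl below).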
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate that nodes are not None and do not fail to upgrade to Rc
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursive call it
let mut childen_iter = x.children().into_iter();
if let Some(child) = childen_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in childen_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break circle dependency to enable drop
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert!(sib.is_some());
sib = right!(sib);
}
writeln!(f, "{}>> end <<{}", "-".repeat(28), "-".repeat(28))?;
Ok(())
}
}
impl<I: Ord + Hash + Clone + Debug, T: Ord + Clone + Debug> Clone for FibHeap<I, T> {
fn clone(&self) -> Self {
let len = self.len;
let rcnt = self.rcnt;
let mut nodes = HashMap::new();
let min;
let mut roots_iter | {
if !left!(x).is_none() {
right!(left!(x).upgrade(), right!(x));
} else {
debug_assert!(child!(self).rc_eq(&x));
child!(self, right!(x));
}
if !right!(x).is_none() {
left!(right!(x), left!(x));
}
rank!(self, rank!(self) - 1);
x.purge_as_root();
} | identifier_body |
fib.rs | rev ref
paren: WeakNode<I, T>,
child: Node<I, T>,
/// Indicate that it has lost a child
marked: bool,
}
////////////////////////////////////////////////////////////////////////////////
//// Implementation
impl<I: Debug, T: Debug> Debug for Node_<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{:?}[{:?}]{}",
self.idx,
self.val,
if self.marked { " X" } else { "" }
)
}
}
impl_node!();
impl<I, T> Node<I, T> {
fn children(&self) -> Vec<Self> {
let mut child = child!(self);
let mut res = vec![];
while child.is_some() {
res.push(child.clone());
child = right!(child);
}
res
}
/// remove paren, left and right
fn purge_as_root(&self) {
paren!(self, WeakNode::none());
left!(self, WeakNode::none());
right!(self, Node::none());
}
fn cut_child(&self, x: Node<I, T>) {
if !left!(x).is_none() {
right!(left!(x).upgrade(), right!(x));
} else {
debug_assert!(child!(self).rc_eq(&x));
child!(self, right!(x));
}
if !right!(x).is_none() {
left!(right!(x), left!(x));
}
rank!(self, rank!(self) - 1);
x.purge_as_root();
}
/// replace with new val, return old val
fn replace_key(&self, val: T) -> T
where
I: Debug,
T: Debug
{
replace(val_mut!(self), val)
}
fn replace(&mut self, x: Self) -> Self {
let old = Self(self.0.clone());
self.0 = x.0;
old
}
#[cfg(test)]
#[allow(unused)]
fn validate_ref(&self)
where
I: Clone,
{
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have the same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
/// merge same-rank trees recursively
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend that x itself is a parent that meets the cut condition, so the cascading cut starts from x
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
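// Editor's note (illustrative): increase-key is the expensive direction.
// Raising a value can break heap order against every child, so the loop above
// must scan all children (O(rank)), and when x was the minimum the whole root
// list is rescanned -- hence the O(rank) = O(n) warning, in contrast to the
// amortized O(1) decrease_key_.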
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// By definition roots are never marked, but that should not matter here; the mark gives a more precise picture of the tree shape after a pop-triggered consolidation
marked!(x, true);
}
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
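// Worked linking example (editor's addition): merging two rank-1 roots,
// x = 3 with child 7 and y = 5 with child 9, yields
//
//     3          <- x keeps the parent slot (smaller value wins;
//    / \            ties go to whichever root is self.min)
//   5   7
//   |
//   9
//
// y becomes the head of x's child list and rank(x) rises to 2.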
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate that nodes are not None and do not fail to upgrade to Rc
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursive call it
let mut childen_iter = x.children().into_iter();
if let Some(child) = childen_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in childen_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break circle dependency to enable drop
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
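// Editor's note: Drop has to break the strong `right` cycle min -> ... -> min
// before clearing the node index, otherwise every node would leak. Detaching
// the tail's `right` pointer suffices because `left` and `paren` are weak.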
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert! | }
writeln!(f, "{}>> end <<{}", "-".repeat(28), "-".repeat(28))?;
Ok(())
}
}
impl<I: Ord + Hash + Clone + Debug, T: Ord + Clone + Debug> Clone for FibHeap<I, T> {
fn clone(&self) -> Self {
let len = self.len;
let rcnt = self.rcnt;
let mut nodes = HashMap::new();
let min;
let mut roots_iter | (sib.is_some());
sib = right!(sib);
| conditional_block |
fib.rs | while child.is_some() {
res.push(child.clone());
child = right!(child);
}
res
}
/// remove paren, left and right
fn purge_as_root(&self) {
paren!(self, WeakNode::none());
left!(self, WeakNode::none());
right!(self, Node::none());
}
fn cut_child(&self, x: Node<I, T>) {
if !left!(x).is_none() {
right!(left!(x).upgrade(), right!(x));
} else {
debug_assert!(child!(self).rc_eq(&x));
child!(self, right!(x));
}
if !right!(x).is_none() {
left!(right!(x), left!(x));
}
rank!(self, rank!(self) - 1);
x.purge_as_root();
}
/// replace with new val, return old val
fn replace_key(&self, val: T) -> T
where
I: Debug,
T: Debug
{
replace(val_mut!(self), val)
}
fn replace(&mut self, x: Self) -> Self {
let old = Self(self.0.clone());
self.0 = x.0;
old
}
#[cfg(test)]
#[allow(unused)]
fn validate_ref(&self)
where
I: Clone,
{
assert!(self.is_some());
let _self_idx = idx!(self);
/* validate right sibling */
let rh = right!(self);
if rh.is_some() {
let _rh_idx = idx!(rh);
let rhlf = left!(rh).upgrade();
assert!(rhlf.rc_eq(self));
assert!(rhlf.is_some());
rh.validate_ref();
}
/* validate children */
let child = child!(self);
if child.is_some() {
let _child_idx = idx!(child);
let cpw = paren!(child);
assert!(!cpw.is_none());
let cp = cpw.upgrade();
assert!(cp.rc_eq(self));
assert!(cp.is_some());
child.validate_ref();
}
}
}
impl<I: Debug, T: Debug> Debug for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_none() {
write!(f, "None")
} else {
write!(f, "{:?}", self.0.as_ref().unwrap().as_ref().borrow())
}
}
}
impl<I: Debug, T: Debug> Display for Node<I, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "R({:?}) ", self)?;
let mut curq = vec![(self.clone(), self.children())];
loop {
let mut nxtq = vec![];
for (p, children) in curq {
if children.is_empty() {
break;
}
write!(f, "P({:?}) ", p)?;
let childlen = children.len();
for (i, child) in children.into_iter().enumerate() {
write!(f, "{:?}", child)?;
if i < childlen - 1 {
write!(f, ", ")?;
}
nxtq.push((child.clone(), child.children()));
}
write!(f, "; ")?;
}
if !nxtq.is_empty() {
writeln!(f)?;
curq = nxtq;
} else {
break;
}
}
Ok(())
}
}
impl<I, T> FibHeap<I, T>
where
I: Eq + Hash + Clone + Debug,
T: Ord + Debug
{
////////////////////////////////////////////////////////////////////////////
//// Public method
pub fn new() -> Self {
Self {
len: 0,
rcnt: 0,
min: Node::none(),
nodes: HashMap::new(),
}
}
pub fn len(&self) -> usize {
self.len
}
/// A node with the same index would be overridden
pub fn push(&mut self, i: I, v: T)
{
let node = node!(i.clone(), v);
self.nodes.insert(i, node.clone());
self.push_into_roots(node.clone());
if val!(node) < val!(self.min) {
self.min = node;
}
self.len += 1;
}
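// A minimal usage sketch (illustrative values only, not from the crate's
// own tests):
//
//     let mut heap: FibHeap<&str, i32> = FibHeap::new();
//     heap.push("b", 2);
//     heap.push("a", 1);
//     assert_eq!(heap.top(), Some(&1)); // min-heap: smallest value on top
//     assert_eq!(heap.pop(), Some(1));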
/// Amortized cost O(rank(H))
///
/// trees(H') <= rank(H) + 1 # since no two trees have same rank.
///
/// delete-min
pub fn pop_item(&mut self) -> Option<(I, T)>
{
if self.min.is_none() {
return None;
}
self.len -= 1;
/* push children of oldmin into roots */
for child in self.min.children() {
self.push_into_roots(child.clone());
}
/* update min */
let newmin = self.roots()[1..]
.into_iter()
.min_by_key(|&sib| val!(sib))
.cloned()
.unwrap_or_default();
/* just del old min */
self.remove_from_roots(self.min.clone());
let oldmin = self.min.replace(newmin);
self.consolidate();
Some((
self.remove_from_index(&oldmin),
unwrap_into!(oldmin).val
))
}
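// Note: `roots()` always yields the current minimum first, so the `[1..]`
// slice above scans every root except the node being removed.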
/// Merge roots of equal rank repeatedly until all root ranks are distinct
pub fn consolidate(&mut self) {
let mut rank: HashMap<usize, Node<I, T>> = hashmap!();
for mut sib in self.roots() {
while let Some(x) = rank.remove(&rank!(sib)) {
sib = self.merge_same_rank_root(x, sib);
}
rank.insert(rank!(sib), sib);
}
}
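// Amortized-analysis note: with the classic potential Φ(H) = trees(H) +
// 2·marks(H), each merge in `consolidate` removes one root and pays for
// itself, which is what gives `pop_item` its O(log n) amortized bound.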
/// Return oldval, alias of ReplaceOrPush
///
/// Exec push if the val doesn't exist.
///
pub fn insert(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Ord + Debug
{
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
let x = ent.get().clone();
let oldv = x.replace_key(v);
match val!(x).cmp(&oldv) {
Less => self.decrease_key_(x),
Equal => (),
Greater => self.increase_key_(x),
}
Some(oldv)
}
Vacant(_ent) => {
self.push(i, v);
None
}
}
}
pub fn union(&mut self, _other: Self) {
unimplemented!("link roots, but not O(1) for link index reference")
}
pub fn delete<Q: AsRef<I>>(&mut self, _i: Q) -> Option<T> {
unimplemented!("1. decrease-val to -infi, 2. pop");
}
////////////////////////////////////////////////////////////////////////////
//// Extra functional method
/// Return oldval
///
pub fn decrease_key(&mut self, i: I, v: T) -> Option<T>
where
I: Eq + Hash + Clone,
T: Debug
{
let x;
match self.nodes.entry(i.clone()) {
Occupied(ent) => {
x = ent.get().clone();
let oldv = x.replace_key(v);
self.decrease_key_(x);
Some(oldv)
}
Vacant(_ent) => None,
}
}
pub fn top_item(&self) -> Option<(I, &T)>
where
I: Eq + Clone
{
if self.min.is_some() {
Some((idx!(self.min), val!(self.min)))
} else {
None
}
}
pub fn top(&self) -> Option<&T> {
self.top_item().map(|x| x.1)
}
pub fn pop(&mut self) -> Option<T> {
self.pop_item().map(|x| x.1)
}
pub fn get<Q>(&self, i: &Q) -> Option<&T>
where
I: Borrow<Q>,
Q: Ord + Hash + ?Sized,
{
self.nodes.get(i).map(|node| val!(node))
}
pub fn indexes(&self) -> impl Iterator<Item = &I> {
self.nodes.keys()
}
////////////////////////////////////////////////////////////////////////////
//// Assistant method
fn decrease_key_(&mut self, x: Node<I, T>) {
let ent;
let p = paren!(x);
if !p.is_none() && val!(x) < val!(p.upgrade()) {
// Pretend x itself is a marked parent that qualifies for cutting, so the
// cascading cut below starts from x.
marked!(x, true);
ent = x.downgrade();
} else {
ent = WeakNode::none();
}
self.cut_meld_unmark_to_roots(ent);
if val!(x) < val!(self.min) {
debug_assert!(paren!(x).is_none());
self.min = x;
}
}
/// WARNING: O(rank) = O(n)
fn increase_key_(&mut self, x: Node<I, T>) {
let ent;
let mut children_lost = if marked!(x) { 1 } else { 0 };
for child in x.children() {
if val!(child) < val!(x) {
x.cut_child(child.clone());
self.push_into_roots(child.clone());
marked!(child, false);
children_lost += 1;
}
}
match children_lost.cmp(&1) {
Less => ent = WeakNode::none(),
Equal => {
marked!(x, true);
ent = paren!(x);
}
Greater => {
marked!(x, true);
ent = x.downgrade();
}
}
self.cut_meld_unmark_to_roots(ent);
// WARNING: O(rank), update self.min
if x.rc_eq(&self.min) {
let min_node =
self.roots().into_iter().min_by_key(|x| val!(x)).unwrap();
self.min = min_node;
}
}
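// Cascading cut: starting from `ent`, cut each marked ancestor and meld it
// into the root list. Letting a node lose at most one child before being
// cut itself is what bounds node rank and keeps decrease-key O(1) amortized.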
fn cut_meld_unmark_to_roots(&mut self, ent: WeakNode<I, T>) {
if ent.is_none() {
return;
}
let mut x = ent.upgrade();
let mut p = paren!(x);
while marked!(x) && !p.is_none() {
let strongp = p.upgrade();
strongp.cut_child(x.clone());
self.push_into_roots(x.clone());
marked!(x, false);
x = strongp;
p = paren!(x);
}
// Roots are by definition unmarked, but marking here is harmless; it only
// makes the tree state after a later pop-triggered consolidation more precise.
marked!(x, true);
}
fn remove_from_index(&mut self, x: &Node<I, T>) -> I
where
I: Eq + Hash + Clone
{
let k = idx!(x);
self.nodes.remove(&k);
k
}
/// insert at sib of self.min, with purge
fn push_into_roots(&mut self, x: Node<I, T>) {
debug_assert!(!self.min.rc_eq(&x));
self.rcnt += 1;
x.purge_as_root();
if self.min.is_none() {
self.min = x;
left!(self.min, self.min.downgrade());
right!(self.min, self.min.clone());
} else {
debug_assert!(right!(self.min).is_some());
right!(x, right!(self.min));
left!(x, self.min.downgrade());
right!(self.min, x.clone());
left!(right!(x), x.downgrade());
}
}
/// from self.min go through all roots
fn roots(&self) -> Vec<Node<I, T>> {
let mut sibs = vec![];
if self.min.is_none() {
return sibs;
} else {
sibs.push(self.min.clone());
}
let mut sib = right!(self.min);
while !sib.rc_eq(&self.min) {
sibs.push(sib.clone());
sib = right!(sib);
}
sibs
}
fn remove_from_roots(&mut self, x: Node<I, T>) {
self.rcnt -= 1;
if self.rcnt > 0 {
right!(left!(x).upgrade(), right!(x));
left!(right!(x), left!(x));
}
x.purge_as_root();
}
/// update self.rcnt
fn merge_same_rank_root(
&mut self,
mut x: Node<I, T>,
mut y: Node<I, T>,
) -> Node<I, T> {
debug_assert_eq!(rank!(x), rank!(y));
// let x be parent
if val!(y) < val!(x) || val!(y) == val!(x) && y.rc_eq(&self.min) {
(x, y) = (y, x);
}
// remove y from roots
self.remove_from_roots(y.clone());
// link y to x child
right!(y, child!(x));
if child!(x).is_some() {
left!(child!(x), y.downgrade());
}
// link y to x
paren!(y, x.downgrade());
child!(x, y.clone());
rank!(x, rank!(x) + 1);
x
}
////////////////////////////////////////////////////////////////////////////
//// Validation method
/// Validate that nodes are not `None` and that weak references can be upgraded to `Rc`
#[cfg(test)]
#[allow(unused)]
pub(crate) fn validate_ref(&self) {
if self.len() == 0 {
return;
}
/* validate roots */
for root in self.roots() {
assert!(root.is_some());
let rh = right!(root);
assert!(rh.is_some());
let wlf = left!(root);
assert!(!wlf.is_none());
let left = wlf.upgrade();
assert!(left.is_some());
let child = child!(root);
if child.is_some() {
child.validate_ref();
}
}
}
}
impl<I: Eq + Hash + Clone, T: Clone> FibHeap<I, T> {
fn overall_clone(
&self,
nodes: &mut HashMap<I, Node<I, T>>,
x: Node<I, T>,
) -> Node<I, T> {
if x.is_none() {
return Node::none();
}
// overall clone node body
let newx = node!(idx!(x), val!(x).clone(), rank!(x), marked!(x));
// update index reference
nodes.insert(idx!(x), newx.clone());
// recursive call it
let mut children_iter = x.children().into_iter();
if let Some(child) = children_iter.next() {
let newchild = self.overall_clone(nodes, child);
child!(newx, newchild.clone());
paren!(newchild, newx.downgrade());
let mut cur = newchild;
for child in children_iter {
let newchild = self.overall_clone(nodes, child);
right!(cur, newchild.clone());
left!(newchild, cur.downgrade());
cur = newchild;
}
}
newx
}
}
impl<I, T> Drop for FibHeap<I, T> {
fn drop(&mut self) {
if self.len > 0 {
// break the circular sibling links so the Rc ring can be dropped
let tail = left!(self.min).upgrade();
right!(tail, Node::none());
self.nodes.clear();
}
}
}
impl<T: Debug, K: Debug> Display for FibHeap<T, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut sib = self.min.clone();
for i in 1..=self.rcnt {
writeln!(f, "{} ({i:03}) {}", "-".repeat(28), "-".repeat(28))?;
// writeln!(f)?;
if sib.rc_eq(&self.min) {
write!(f, "M=>")?;
}
writeln!(f, "{}", sib)?;
debug_assert!(sib.is_some());
sib = right!(sib);
}
writeln!(f, "{}>> end <<{}", "-".repeat(28), "-".repeat(28))?;
Ok(())
}
}
impl<I: Ord + Hash + Clone + Debug, T: Ord + Clone + Debug> Clone for FibHeap<I, T> {
fn clone(&self) -> Self {
let len = self.len;
let rcnt = self.rcnt;
let mut nodes = HashMap::new();
let min;
let mut roots_iter = self.roots().into_iter();
if let Some(_min) = roots_iter.next() {
min = self.overall_clone(&mut nodes, _min.clone());
let mut cur = min.clone();
for root in roots_iter {
let newroot = self.overall_clone(&mut nodes, root);
right!(cur, newroot.clone());
left!(newroot, cur.downgrade());
cur = newroot;
}
right!(cur, min.clone());
left!(min, cur.downgrade());
} else { | min = Node::none();
}
| random_line_split |
|
main.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
const COLLECTED_ORE: u64 = 1000000000000;
#[derive(Debug, Eq, PartialEq)]
struct Reaction {
output: (String, u64),
ingredients: Vec<(String, u64)>,
}
type ReactionMap = HashMap<String, Reaction>;
fn calc_ore(reactions: &ReactionMap) -> u64 {
calc_ore_for_fuel(1, reactions)
}
fn calc_ore_for_fuel(fuel: u64, reactions: &ReactionMap) -> u64 {
let mut ore = 0;
let mut spare_chemicals = HashMap::new();
let mut requirements = Vec::new();
requirements.push((String::from("FUEL"), fuel));
let ore_name = String::from("ORE");
while !requirements.is_empty() {
let cur_requirements = requirements.clone();
requirements.clear();
for (req_chem, req_amount) in cur_requirements {
// Check whether we have any spare of this ingredient from
// other reactions.
let mut adj_req_amount = req_amount;
if let Some(spare) = spare_chemicals.get_mut(&req_chem) {
if *spare >= req_amount {
// We have enough spare to completely fulfill this
// requirement, no need to go further.
*spare -= req_amount;
continue;
} else {
// Reduce the required amount by the amount we have
// spare.
adj_req_amount = req_amount - *spare;
*spare = 0;
}
}
// Find the reaction that produces this ingredient.
let reaction = reactions
.get(&req_chem)
.unwrap_or_else(|| panic!("Couldn't find reaction for {}", req_chem));
// Find out how many times we need to run this reaction,
// and how much will be spare.
let output_amount = reaction.output.1;
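// Integer ceiling division below: (n - 1) / d + 1 == ceil(n / d) for n >= 1.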
let reaction_count = (adj_req_amount - 1) / output_amount + 1;
let spare = output_amount * reaction_count - adj_req_amount;
// Update the spare count for this ingredient.
if let Some(existing_spare) = spare_chemicals.get_mut(&req_chem) {
*existing_spare += spare;
} else {
spare_chemicals.insert(req_chem, spare);
}
// Update the required ingredients list with the ingredients
// needed to make this chemical.
for ingredient in reaction.ingredients.clone() {
let ingredient_name = ingredient.0;
let ingredient_count = reaction_count * ingredient.1;
if ingredient_name == ore_name {
ore += ingredient_count;
} else {
requirements.push((ingredient_name, ingredient_count));
}
}
}
}
ore
}
fn calc_fuel_for_ore(ore: u64, reactions: &ReactionMap) -> u64 | let used_ore = calc_ore_for_fuel(current, reactions);
if used_ore < ore {
lower = current;
} else {
upper = current;
}
if upper - 1 == lower {
return lower;
}
}
}
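// Search invariant (assuming a single fuel costs less than `ore`):
// calc_ore_for_fuel(lower) < ore <= calc_ore_for_fuel(upper), so the final
// `lower` is the largest amount of fuel producible within the budget.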
fn parse_chemical(chemical: &str) -> (String, u64) {
let mut iter = chemical.split_whitespace();
let count = iter.next().unwrap().parse::<u64>().unwrap();
let chem = iter.next().unwrap();
(String::from(chem), count)
}
fn parse_reactions(strs: &[String]) -> ReactionMap {
let mut reactions = HashMap::new();
for reaction in strs {
let mut iter = reaction.split(" => ");
let ingredients_str = iter.next().unwrap();
let output_str = iter.next().unwrap();
let mut ingredients = Vec::new();
for ingredient in ingredients_str.split(", ") {
ingredients.push(parse_chemical(ingredient));
}
let output = parse_chemical(output_str);
reactions.insert(
output.0.clone(),
Reaction {
output: output,
ingredients: ingredients,
},
);
}
reactions
}
fn parse_input(filename: &str) -> ReactionMap {
let file = File::open(filename).expect("Failed to open file");
let reader = BufReader::new(file);
let reactions: Vec<String> = reader
.lines()
.map(|l| l.expect("Failed to read line"))
.map(|l| String::from(l.trim()))
.collect();
parse_reactions(reactions.as_slice())
}
fn main() {
let reactions = parse_input("input");
// Part 1
let ore = calc_ore(&reactions);
println!("Require {} ore for 1 fuel", ore);
// Part 2
let fuel = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
println!("Produce {} fuel from {} ore", fuel, COLLECTED_ORE);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let input = vec![String::from("7 A, 1 E => 1 FUEL")];
let reactions = parse_reactions(input.as_slice());
let result = reactions.get(&String::from("FUEL"));
assert!(result.is_some());
let reaction = result.unwrap();
assert_eq!(
*reaction,
Reaction {
output: (String::from("FUEL"), 1),
ingredients: vec![(String::from("A"), 7), (String::from("E"), 1),],
},
);
}
#[test]
fn example1() {
let input = vec![
String::from("10 ORE => 10 A"),
String::from("1 ORE => 1 B"),
String::from("7 A, 1 B => 1 C"),
String::from("7 A, 1 C => 1 D"),
String::from("7 A, 1 D => 1 E"),
String::from("7 A, 1 E => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 31);
}
#[test]
fn example2() {
let input = vec![
String::from("9 ORE => 2 A"),
String::from("8 ORE => 3 B"),
String::from("7 ORE => 5 C"),
String::from("3 A, 4 B => 1 AB"),
String::from("5 B, 7 C => 1 BC"),
String::from("4 C, 1 A => 1 CA"),
String::from("2 AB, 3 BC, 4 CA => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 165);
}
#[test]
fn example3() {
let input = vec![
String::from("157 ORE => 5 NZVS"),
String::from("165 ORE => 6 DCFZ"),
String::from("44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL"),
String::from("12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ"),
String::from("179 ORE => 7 PSHF"),
String::from("177 ORE => 5 HKGWZ"),
String::from("7 DCFZ, 7 PSHF => 2 XJWVT"),
String::from("165 ORE => 2 GPVTF"),
String::from("3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 13312);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 82892753);
}
#[test]
fn example4() {
let input = vec![
String::from("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG"),
String::from("17 NVRVD, 3 JNWZP => 8 VPVL"),
String::from("53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL"),
String::from("22 VJHF, 37 MNCFX => 5 FWMGM"),
String::from("139 ORE => 4 NVRVD"),
String::from("144 ORE => 7 JNWZP"),
String::from("5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC"),
String::from("5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV"),
String::from("145 ORE => 6 MNCFX"),
String::from("1 NVRVD => 8 CXFTF"),
String::from("1 VJHF, 6 MNCFX => 4 RFSQX"),
String::from("176 ORE => 6 VJHF"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 180697);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 5586022);
}
#[test]
fn example5() {
let input = vec![
String::from("171 ORE => 8 CNZTR"),
String::from("7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL"),
String::from("114 ORE => 4 BHXH"),
String::from("14 VRPVC => 6 BMBT"),
String::from("6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL"),
String::from("6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT"),
String::from("15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW"),
String::from("13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW"),
String::from("5 BMBT => 4 WPTQ"),
String::from("189 ORE => 9 KTJDG"),
String::from("1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP"),
String::from("12 VRPVC, 27 CNZTR => 2 XDBXC"),
String::from("15 KTJDG, 12 BHXH => 5 XCVML"),
String::from("3 BHXH, 2 VRPVC => 7 MZWV"),
String::from("121 ORE => 7 VRPVC"),
String::from("7 XCVML => 6 RJRHP"),
String::from("5 BHXH, 4 VRPVC => 5 LTCX"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 2210736);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 460664);
}
}
| {
let mut lower = 1;
let mut current;
let mut upper = 1;
// Find an upper bound to use for binary search.
loop {
let used_ore = calc_ore_for_fuel(upper, reactions);
if used_ore < ore {
upper *= 2;
} else {
break;
}
}
// Binary search to find the highest amount of fuel we can
// produce without using all the ore.
loop {
current = (upper - lower) / 2 + lower;
| identifier_body |
main.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
const COLLECTED_ORE: u64 = 1000000000000;
#[derive(Debug, Eq, PartialEq)]
struct Reaction {
output: (String, u64),
ingredients: Vec<(String, u64)>,
}
type ReactionMap = HashMap<String, Reaction>;
fn calc_ore(reactions: &ReactionMap) -> u64 {
calc_ore_for_fuel(1, reactions)
}
fn calc_ore_for_fuel(fuel: u64, reactions: &ReactionMap) -> u64 {
let mut ore = 0;
let mut spare_chemicals = HashMap::new();
let mut requirements = Vec::new();
requirements.push((String::from("FUEL"), fuel));
let ore_name = String::from("ORE");
while !requirements.is_empty() {
let cur_requirements = requirements.clone();
requirements.clear();
for (req_chem, req_amount) in cur_requirements {
// Check whether we have any spare of this ingredient from
// other reactions.
let mut adj_req_amount = req_amount;
if let Some(spare) = spare_chemicals.get_mut(&req_chem) {
if *spare >= req_amount {
// We have enough spare to completely fulfill this
// requirement, no need to go further.
*spare -= req_amount;
continue;
} else {
// Reduce the required amount by the amount we have
// spare.
adj_req_amount = req_amount - *spare;
*spare = 0;
}
}
// Find the reaction that produces this ingredient.
let reaction = reactions
.get(&req_chem)
.unwrap_or_else(|| panic!("Couldn't find reaction for {}", req_chem));
// Find out how many times we need to run this reaction,
// and how much will be spare.
let output_amount = reaction.output.1;
let reaction_count = (adj_req_amount - 1) / output_amount + 1;
let spare = output_amount * reaction_count - adj_req_amount;
// Update the spare count for this ingredient.
if let Some(existing_spare) = spare_chemicals.get_mut(&req_chem) {
*existing_spare += spare;
} else {
spare_chemicals.insert(req_chem, spare);
}
// Update the required ingredients list with the ingredients
// needed to make this chemical.
for ingredient in reaction.ingredients.clone() {
let ingredient_name = ingredient.0;
let ingredient_count = reaction_count * ingredient.1;
if ingredient_name == ore_name {
ore += ingredient_count;
} else {
requirements.push((ingredient_name, ingredient_count));
}
}
}
}
ore
}
fn calc_fuel_for_ore(ore: u64, reactions: &ReactionMap) -> u64 {
let mut lower = 1;
let mut current;
let mut upper = 1;
// Find an upper bound to use for binary search.
loop {
let used_ore = calc_ore_for_fuel(upper, reactions);
if used_ore < ore {
upper *= 2;
} else {
break;
}
}
// Binary search to find the highest amount of fuel we can
// produce without using all the ore.
loop {
current = (upper - lower) / 2 + lower;
let used_ore = calc_ore_for_fuel(current, reactions);
if used_ore < ore {
lower = current;
} else {
upper = current;
}
if upper - 1 == lower {
return lower;
}
}
}
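// The doubling phase above is an exponential ("galloping") search: it finds
// an upper bound in O(log answer) probes before the binary search narrows in.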
fn parse_chemical(chemical: &str) -> (String, u64) {
let mut iter = chemical.split_whitespace();
let count = iter.next().unwrap().parse::<u64>().unwrap();
let chem = iter.next().unwrap();
(String::from(chem), count)
}
fn parse_reactions(strs: &[String]) -> ReactionMap {
let mut reactions = HashMap::new();
for reaction in strs {
let mut iter = reaction.split(" => ");
let ingredients_str = iter.next().unwrap();
let output_str = iter.next().unwrap();
let mut ingredients = Vec::new();
for ingredient in ingredients_str.split(", ") {
ingredients.push(parse_chemical(ingredient));
}
let output = parse_chemical(output_str);
reactions.insert(
output.0.clone(),
Reaction {
output: output,
ingredients: ingredients,
},
);
}
reactions
}
fn parse_input(filename: &str) -> ReactionMap {
let file = File::open(filename).expect("Failed to open file");
let reader = BufReader::new(file);
let reactions: Vec<String> = reader
.lines()
.map(|l| l.expect("Failed to read line"))
.map(|l| String::from(l.trim()))
.collect();
parse_reactions(reactions.as_slice())
}
fn main() {
let reactions = parse_input("input");
// Part 1
let ore = calc_ore(&reactions);
println!("Require {} ore for 1 fuel", ore);
// Part 2
let fuel = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
println!("Produce {} fuel from {} ore", fuel, COLLECTED_ORE);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn | () {
let input = vec![String::from("7 A, 1 E => 1 FUEL")];
let reactions = parse_reactions(input.as_slice());
let result = reactions.get(&String::from("FUEL"));
assert!(result.is_some());
let reaction = result.unwrap();
assert_eq!(
*reaction,
Reaction {
output: (String::from("FUEL"), 1),
ingredients: vec![(String::from("A"), 7), (String::from("E"), 1),],
},
);
}
#[test]
fn example1() {
let input = vec![
String::from("10 ORE => 10 A"),
String::from("1 ORE => 1 B"),
String::from("7 A, 1 B => 1 C"),
String::from("7 A, 1 C => 1 D"),
String::from("7 A, 1 D => 1 E"),
String::from("7 A, 1 E => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 31);
}
#[test]
fn example2() {
let input = vec![
String::from("9 ORE => 2 A"),
String::from("8 ORE => 3 B"),
String::from("7 ORE => 5 C"),
String::from("3 A, 4 B => 1 AB"),
String::from("5 B, 7 C => 1 BC"),
String::from("4 C, 1 A => 1 CA"),
String::from("2 AB, 3 BC, 4 CA => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 165);
}
#[test]
fn example3() {
let input = vec![
String::from("157 ORE => 5 NZVS"),
String::from("165 ORE => 6 DCFZ"),
String::from("44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL"),
String::from("12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ"),
String::from("179 ORE => 7 PSHF"),
String::from("177 ORE => 5 HKGWZ"),
String::from("7 DCFZ, 7 PSHF => 2 XJWVT"),
String::from("165 ORE => 2 GPVTF"),
String::from("3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 13312);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 82892753);
}
#[test]
fn example4() {
let input = vec![
String::from("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG"),
String::from("17 NVRVD, 3 JNWZP => 8 VPVL"),
String::from("53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL"),
String::from("22 VJHF, 37 MNCFX => 5 FWMGM"),
String::from("139 ORE => 4 NVRVD"),
String::from("144 ORE => 7 JNWZP"),
String::from("5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC"),
String::from("5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV"),
String::from("145 ORE => 6 MNCFX"),
String::from("1 NVRVD => 8 CXFTF"),
String::from("1 VJHF, 6 MNCFX => 4 RFSQX"),
String::from("176 ORE => 6 VJHF"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 180697);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 5586022);
}
#[test]
fn example5() {
let input = vec![
String::from("171 ORE => 8 CNZTR"),
String::from("7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL"),
String::from("114 ORE => 4 BHXH"),
String::from("14 VRPVC => 6 BMBT"),
String::from("6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL"),
String::from("6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT"),
String::from("15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW"),
String::from("13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW"),
String::from("5 BMBT => 4 WPTQ"),
String::from("189 ORE => 9 KTJDG"),
String::from("1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP"),
String::from("12 VRPVC, 27 CNZTR => 2 XDBXC"),
String::from("15 KTJDG, 12 BHXH => 5 XCVML"),
String::from("3 BHXH, 2 VRPVC => 7 MZWV"),
String::from("121 ORE => 7 VRPVC"),
String::from("7 XCVML => 6 RJRHP"),
String::from("5 BHXH, 4 VRPVC => 5 LTCX"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 2210736);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 460664);
}
}
| test_parse | identifier_name |
main.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
const COLLECTED_ORE: u64 = 1000000000000;
#[derive(Debug, Eq, PartialEq)]
struct Reaction {
output: (String, u64),
ingredients: Vec<(String, u64)>,
}
type ReactionMap = HashMap<String, Reaction>;
fn calc_ore(reactions: &ReactionMap) -> u64 {
calc_ore_for_fuel(1, reactions)
}
fn calc_ore_for_fuel(fuel: u64, reactions: &ReactionMap) -> u64 {
let mut ore = 0;
let mut spare_chemicals = HashMap::new();
let mut requirements = Vec::new();
requirements.push((String::from("FUEL"), fuel));
let ore_name = String::from("ORE");
while !requirements.is_empty() {
let cur_requirements = requirements.clone();
requirements.clear();
for (req_chem, req_amount) in cur_requirements {
// Check whether we have any spare of this ingredient from
// other reactions.
let mut adj_req_amount = req_amount;
if let Some(spare) = spare_chemicals.get_mut(&req_chem) {
if *spare >= req_amount {
// We have enough spare to completely fulfill this
// requirement, no need to go further.
*spare -= req_amount;
continue;
} else {
// Reduce the required amount by the amount we have
// spare.
adj_req_amount = req_amount - *spare;
*spare = 0;
}
}
// Find the reaction that produces this ingredient.
let reaction = reactions
.get(&req_chem)
.unwrap_or_else(|| panic!("Couldn't find reaction for {}", req_chem));
// Find out how many times we need to run this reaction,
// and how much will be spare.
let output_amount = reaction.output.1;
let reaction_count = (adj_req_amount - 1) / output_amount + 1;
let spare = output_amount * reaction_count - adj_req_amount;
// Update the spare count for this ingredient.
if let Some(existing_spare) = spare_chemicals.get_mut(&req_chem) {
*existing_spare += spare;
} else {
spare_chemicals.insert(req_chem, spare);
}
// Update the required ingredients list with the ingredients
// needed to make this chemical.
for ingredient in reaction.ingredients.clone() {
let ingredient_name = ingredient.0;
let ingredient_count = reaction_count * ingredient.1;
if ingredient_name == ore_name {
ore += ingredient_count;
} else {
requirements.push((ingredient_name, ingredient_count));
}
}
}
}
ore
}
fn calc_fuel_for_ore(ore: u64, reactions: &ReactionMap) -> u64 {
let mut lower = 1;
let mut current;
let mut upper = 1;
// Find an upper bound to use for binary search.
loop {
let used_ore = calc_ore_for_fuel(upper, reactions);
if used_ore < ore {
upper *= 2;
} else {
break;
}
}
// Binary search to find the highest amount of fuel we can
// produce without using all the ore.
loop {
current = (upper - lower) / 2 + lower;
let used_ore = calc_ore_for_fuel(current, reactions);
if used_ore < ore {
lower = current;
} else {
upper = current;
}
if upper - 1 == lower {
return lower;
}
}
}
fn parse_chemical(chemical: &str) -> (String, u64) {
let mut iter = chemical.split_whitespace();
let count = iter.next().unwrap().parse::<u64>().unwrap();
let chem = iter.next().unwrap();
(String::from(chem), count)
}
fn parse_reactions(strs: &[String]) -> ReactionMap {
let mut reactions = HashMap::new();
for reaction in strs {
let mut iter = reaction.split(" => ");
let ingredients_str = iter.next().unwrap();
let output_str = iter.next().unwrap();
let mut ingredients = Vec::new();
for ingredient in ingredients_str.split(", ") {
ingredients.push(parse_chemical(ingredient));
}
let output = parse_chemical(output_str);
reactions.insert(
output.0.clone(),
Reaction {
output: output,
ingredients: ingredients,
},
); | }
fn parse_input(filename: &str) -> ReactionMap {
let file = File::open(filename).expect("Failed to open file");
let reader = BufReader::new(file);
let reactions: Vec<String> = reader
.lines()
.map(|l| l.expect("Failed to read line"))
.map(|l| String::from(l.trim()))
.collect();
parse_reactions(reactions.as_slice())
}
fn main() {
let reactions = parse_input("input");
// Part 1
let ore = calc_ore(&reactions);
println!("Require {} ore for 1 fuel", ore);
// Part 2
let fuel = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
println!("Produce {} fuel from {} ore", fuel, COLLECTED_ORE);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let input = vec![String::from("7 A, 1 E => 1 FUEL")];
let reactions = parse_reactions(input.as_slice());
let result = reactions.get(&String::from("FUEL"));
assert!(result.is_some());
let reaction = result.unwrap();
assert_eq!(
*reaction,
Reaction {
output: (String::from("FUEL"), 1),
ingredients: vec![(String::from("A"), 7), (String::from("E"), 1),],
},
);
}
#[test]
fn example1() {
let input = vec![
String::from("10 ORE => 10 A"),
String::from("1 ORE => 1 B"),
String::from("7 A, 1 B => 1 C"),
String::from("7 A, 1 C => 1 D"),
String::from("7 A, 1 D => 1 E"),
String::from("7 A, 1 E => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 31);
}
#[test]
fn example2() {
let input = vec![
String::from("9 ORE => 2 A"),
String::from("8 ORE => 3 B"),
String::from("7 ORE => 5 C"),
String::from("3 A, 4 B => 1 AB"),
String::from("5 B, 7 C => 1 BC"),
String::from("4 C, 1 A => 1 CA"),
String::from("2 AB, 3 BC, 4 CA => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 165);
}
#[test]
fn example3() {
let input = vec![
String::from("157 ORE => 5 NZVS"),
String::from("165 ORE => 6 DCFZ"),
String::from("44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL"),
String::from("12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ"),
String::from("179 ORE => 7 PSHF"),
String::from("177 ORE => 5 HKGWZ"),
String::from("7 DCFZ, 7 PSHF => 2 XJWVT"),
String::from("165 ORE => 2 GPVTF"),
String::from("3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 13312);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 82892753);
}
#[test]
fn example4() {
let input = vec![
String::from("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG"),
String::from("17 NVRVD, 3 JNWZP => 8 VPVL"),
String::from("53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL"),
String::from("22 VJHF, 37 MNCFX => 5 FWMGM"),
String::from("139 ORE => 4 NVRVD"),
String::from("144 ORE => 7 JNWZP"),
String::from("5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC"),
String::from("5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV"),
String::from("145 ORE => 6 MNCFX"),
String::from("1 NVRVD => 8 CXFTF"),
String::from("1 VJHF, 6 MNCFX => 4 RFSQX"),
String::from("176 ORE => 6 VJHF"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 180697);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 5586022);
}
#[test]
fn example5() {
let input = vec![
String::from("171 ORE => 8 CNZTR"),
String::from("7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL"),
String::from("114 ORE => 4 BHXH"),
String::from("14 VRPVC => 6 BMBT"),
String::from("6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL"),
String::from("6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT"),
String::from("15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW"),
String::from("13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW"),
String::from("5 BMBT => 4 WPTQ"),
String::from("189 ORE => 9 KTJDG"),
String::from("1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP"),
String::from("12 VRPVC, 27 CNZTR => 2 XDBXC"),
String::from("15 KTJDG, 12 BHXH => 5 XCVML"),
String::from("3 BHXH, 2 VRPVC => 7 MZWV"),
String::from("121 ORE => 7 VRPVC"),
String::from("7 XCVML => 6 RJRHP"),
String::from("5 BHXH, 4 VRPVC => 5 LTCX"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 2210736);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 460664);
}
} | }
reactions | random_line_split |
main.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
const COLLECTED_ORE: u64 = 1000000000000;
#[derive(Debug, Eq, PartialEq)]
struct Reaction {
output: (String, u64),
ingredients: Vec<(String, u64)>,
}
type ReactionMap = HashMap<String, Reaction>;
fn calc_ore(reactions: &ReactionMap) -> u64 {
calc_ore_for_fuel(1, reactions)
}
fn calc_ore_for_fuel(fuel: u64, reactions: &ReactionMap) -> u64 {
let mut ore = 0;
let mut spare_chemicals = HashMap::new();
let mut requirements = Vec::new();
requirements.push((String::from("FUEL"), fuel));
let ore_name = String::from("ORE");
while !requirements.is_empty() {
let cur_requirements = requirements.clone();
requirements.clear();
for (req_chem, req_amount) in cur_requirements {
// Check whether we have any spare of this ingredient from
// other reactions.
let mut adj_req_amount = req_amount;
if let Some(spare) = spare_chemicals.get_mut(&req_chem) {
if *spare >= req_amount {
// We have enough spare to completely fulfill this
// requirement, no need to go further.
*spare -= req_amount;
continue;
} else {
// Reduce the required amount by the amount we have
// spare.
adj_req_amount = req_amount - *spare;
*spare = 0;
}
}
// Find the reaction that produces this ingredient.
let reaction = reactions
.get(&req_chem)
.unwrap_or_else(|| panic!("Couldn't find reaction for {}", req_chem));
// Find out how many times we need to run this reaction,
// and how much will be spare.
let output_amount = reaction.output.1;
let reaction_count = (adj_req_amount - 1) / output_amount + 1;
let spare = output_amount * reaction_count - adj_req_amount;
// Update the spare count for this ingredient.
if let Some(existing_spare) = spare_chemicals.get_mut(&req_chem) {
*existing_spare += spare;
} else {
spare_chemicals.insert(req_chem, spare);
}
// Update the required ingredients list with the ingredients
// needed to make this chemical.
for ingredient in reaction.ingredients.clone() {
let ingredient_name = ingredient.0;
let ingredient_count = reaction_count * ingredient.1;
if ingredient_name == ore_name {
ore += ingredient_count;
} else {
requirements.push((ingredient_name, ingredient_count));
}
}
}
}
ore
}
fn calc_fuel_for_ore(ore: u64, reactions: &ReactionMap) -> u64 {
let mut lower = 1;
let mut current;
let mut upper = 1;
// Find an upper bound to use for binary search.
loop {
let used_ore = calc_ore_for_fuel(upper, reactions);
if used_ore < ore {
upper *= 2;
} else |
}
// Binary search to find the highest amount of fuel we can
// produce without using all the ore.
loop {
current = (upper - lower) / 2 + lower;
let used_ore = calc_ore_for_fuel(current, reactions);
if used_ore < ore {
lower = current;
} else {
upper = current;
}
if upper - 1 == lower {
return lower;
}
}
}
fn parse_chemical(chemical: &str) -> (String, u64) {
let mut iter = chemical.split_whitespace();
let count = iter.next().unwrap().parse::<u64>().unwrap();
let chem = iter.next().unwrap();
(String::from(chem), count)
}
fn parse_reactions(strs: &[String]) -> ReactionMap {
let mut reactions = HashMap::new();
for reaction in strs {
let mut iter = reaction.split(" => ");
let ingredients_str = iter.next().unwrap();
let output_str = iter.next().unwrap();
let mut ingredients = Vec::new();
for ingredient in ingredients_str.split(", ") {
ingredients.push(parse_chemical(ingredient));
}
let output = parse_chemical(output_str);
reactions.insert(
output.0.clone(),
Reaction {
output: output,
ingredients: ingredients,
},
);
}
reactions
}
fn parse_input(filename: &str) -> ReactionMap {
let file = File::open(filename).expect("Failed to open file");
let reader = BufReader::new(file);
let reactions: Vec<String> = reader
.lines()
.map(|l| l.expect("Failed to read line"))
.map(|l| String::from(l.trim()))
.collect();
parse_reactions(reactions.as_slice())
}
fn main() {
let reactions = parse_input("input");
// Part 1
let ore = calc_ore(&reactions);
println!("Require {} ore for 1 fuel", ore);
// Part 2
let fuel = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
println!("Produce {} fuel from {} ore", fuel, COLLECTED_ORE);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let input = vec![String::from("7 A, 1 E => 1 FUEL")];
let reactions = parse_reactions(input.as_slice());
let result = reactions.get(&String::from("FUEL"));
assert!(result.is_some());
let reaction = result.unwrap();
assert_eq!(
*reaction,
Reaction {
output: (String::from("FUEL"), 1),
ingredients: vec![(String::from("A"), 7), (String::from("E"), 1),],
},
);
}
#[test]
fn example1() {
let input = vec![
String::from("10 ORE => 10 A"),
String::from("1 ORE => 1 B"),
String::from("7 A, 1 B => 1 C"),
String::from("7 A, 1 C => 1 D"),
String::from("7 A, 1 D => 1 E"),
String::from("7 A, 1 E => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 31);
}
#[test]
fn example2() {
let input = vec![
String::from("9 ORE => 2 A"),
String::from("8 ORE => 3 B"),
String::from("7 ORE => 5 C"),
String::from("3 A, 4 B => 1 AB"),
String::from("5 B, 7 C => 1 BC"),
String::from("4 C, 1 A => 1 CA"),
String::from("2 AB, 3 BC, 4 CA => 1 FUEL"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 165);
}
#[test]
fn example3() {
let input = vec![
String::from("157 ORE => 5 NZVS"),
String::from("165 ORE => 6 DCFZ"),
String::from("44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL"),
String::from("12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ"),
String::from("179 ORE => 7 PSHF"),
String::from("177 ORE => 5 HKGWZ"),
String::from("7 DCFZ, 7 PSHF => 2 XJWVT"),
String::from("165 ORE => 2 GPVTF"),
String::from("3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 13312);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 82892753);
}
#[test]
fn example4() {
let input = vec![
String::from("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG"),
String::from("17 NVRVD, 3 JNWZP => 8 VPVL"),
String::from("53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL"),
String::from("22 VJHF, 37 MNCFX => 5 FWMGM"),
String::from("139 ORE => 4 NVRVD"),
String::from("144 ORE => 7 JNWZP"),
String::from("5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC"),
String::from("5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV"),
String::from("145 ORE => 6 MNCFX"),
String::from("1 NVRVD => 8 CXFTF"),
String::from("1 VJHF, 6 MNCFX => 4 RFSQX"),
String::from("176 ORE => 6 VJHF"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 180697);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 5586022);
}
#[test]
fn example5() {
let input = vec![
String::from("171 ORE => 8 CNZTR"),
String::from("7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL"),
String::from("114 ORE => 4 BHXH"),
String::from("14 VRPVC => 6 BMBT"),
String::from("6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL"),
String::from("6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT"),
String::from("15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW"),
String::from("13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW"),
String::from("5 BMBT => 4 WPTQ"),
String::from("189 ORE => 9 KTJDG"),
String::from("1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP"),
String::from("12 VRPVC, 27 CNZTR => 2 XDBXC"),
String::from("15 KTJDG, 12 BHXH => 5 XCVML"),
String::from("3 BHXH, 2 VRPVC => 7 MZWV"),
String::from("121 ORE => 7 VRPVC"),
String::from("7 XCVML => 6 RJRHP"),
String::from("5 BHXH, 4 VRPVC => 5 LTCX"),
];
let reactions = parse_reactions(input.as_slice());
let result = calc_ore(&reactions);
assert_eq!(result, 2210736);
let result = calc_fuel_for_ore(COLLECTED_ORE, &reactions);
assert_eq!(result, 460664);
}
}
| {
break;
} | conditional_block |
ecvrf.rs | // Copyright (c) The XPeer Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This module implements an instantiation of a verifiable random function known as
//! [ECVRF-ED25519-SHA512-TAI](https://tools.ietf.org/html/draft-irtf-cfrg-vrf-04).
//!
//! # Examples
//!
//! ```
//! use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! use rand::{rngs::StdRng, SeedableRng};
//!
//! let message = b"Test message";
//! let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! let public_key: VRFPublicKey = (&private_key).into();
//! ```
//! **Note**: The above example generates a private key using a private function intended only for
//! testing purposes. Production code should find an alternate means for secure key generation.
//!
//! Produce a proof for a message from a `VRFPrivateKey`, and verify the proof and message
//! using a `VRFPublicKey`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! let proof = private_key.prove(message);
//! assert!(public_key.verify(&proof, message).is_ok());
//! ```
//!
//! Produce a pseudorandom output from a `Proof`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! # let proof = private_key.prove(message);
//! let output: Output = (&proof).into();
//! ```
use crate::traits::*;
use core::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{CompressedEdwardsY, EdwardsPoint},
scalar::Scalar as ed25519_Scalar,
};
use derive_deref::Deref;
use ed25519_dalek::{
self, Digest, PublicKey as ed25519_PublicKey, SecretKey as ed25519_PrivateKey, Sha512,
};
use failure::prelude::*;
use serde::{Deserialize, Serialize};
const SUITE: u8 = 0x03;
const ONE: u8 = 0x01;
const TWO: u8 = 0x02;
const THREE: u8 = 0x03;
/// The number of bytes of [`Output`]
pub const OUTPUT_LENGTH: usize = 64;
/// The number of bytes of [`Proof`]
pub const PROOF_LENGTH: usize = 80;
/// An ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
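// Proof relation (Fiat-Shamir style): with secret scalar x, nonce k,
// gamma = x·H and c = Hash(H, gamma, k·B, k·H), the response s = k + c·x
// lets a verifier recompute k·B as s·B - c·PK and k·H as s·H - c·gamma.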
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
|
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes)
.map_err(|_| CryptoMaterialError::DeserializationError)?,
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime {
Ok(())
} else {
bail!("The proof failed to verify for this public key")
}
}
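// The "TAI" (try-and-increment) part of the suite name: hash (SUITE, ONE,
// PK, alpha, counter), bumping the counter until the digest decodes to a
// curve point, then clear the cofactor to land in the prime-order subgroup.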
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[00..32]);
upper.copy_from_slice(&hash[32..64]);
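// Standard ed25519 scalar clamping: clear the three low bits (cofactor 8),
// clear the top bit, and set bit 254.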
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] {
let mut ret = [0u8; PROOF_LENGTH];
ret[..32].copy_from_slice(&self.gamma.compress().to_bytes()[..]);
ret[32..48].copy_from_slice(&self.c.to_bytes()[..16]);
ret[48..].copy_from_slice(&self.s.to_bytes()[..]);
ret
}
}
impl TryFrom<&[u8]> for Proof {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<Proof, CryptoMaterialError> {
// Reject slices of the wrong length up front; the copies below would panic.
if bytes.len() != PROOF_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut c_buf = [0u8; 32];
c_buf[..16].copy_from_slice(&bytes[32..48]);
let mut s_buf = [0u8; 32];
s_buf.copy_from_slice(&bytes[48..]);
Ok(Proof {
gamma: CompressedEdwardsY::from_slice(&bytes[..32])
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?,
c: ed25519_Scalar::from_bits(c_buf),
s: ed25519_Scalar::from_bits(s_buf),
})
}
}
/// The ECVRF output produced from the proof
pub struct Output([u8; OUTPUT_LENGTH]);
impl Output {
/// Converts an Output into bytes
#[inline]
pub fn to_bytes(&self) -> [u8; OUTPUT_LENGTH] {
self.0
}
}
impl<'a> From<&'a Proof> for Output {
fn from(proof: &'a Proof) -> Output {
let mut output = [0u8; OUTPUT_LENGTH];
output.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, THREE])
.chain(&proof.gamma.mul_by_cofactor().compress().to_bytes()[..])
.result()[..],
);
Output(output)
}
}
pub(super) fn nonce_generation_bytes(nonce: [u8; 32], h_point: EdwardsPoint) -> [u8; 64] {
let mut k_buf = [0u8; 64];
k_buf.copy_from_slice(
&Sha512::new()
.chain(nonce)
.chain(h_point.compress().as_bytes())
.result()[..],
);
k_buf
}
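// Deterministic nonce in the spirit of RFC 8032: k is derived by hashing
// the second half of the expanded key together with the message-dependent
// point H, so proving needs no fresh randomness.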
pub(super) fn hash_points(points: &[EdwardsPoint]) -> ed25519_Scalar {
let mut result = [0u8; 32];
let mut hash = Sha512::new().chain(&[SUITE, TWO]);
for point in points.iter() {
hash = hash.chain(point.compress().to_bytes());
}
result[..16].copy_from_slice(&hash.result()[..16]);
ed25519_Scalar::from_bits(result)
}
| {
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
} | identifier_body |
ecvrf.rs | // Copyright (c) The XPeer Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This module implements an instantiation of a verifiable random function known as
//! [ECVRF-ED25519-SHA512-TAI](https://tools.ietf.org/html/draft-irtf-cfrg-vrf-04).
//!
//! # Examples
//!
//! ```
//! use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! use rand::{rngs::StdRng, SeedableRng};
//!
//! let message = b"Test message";
//! let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! let public_key: VRFPublicKey = (&private_key).into();
//! ```
//! **Note**: The above example generates a private key using a private function intended only for
//! testing purposes. Production code should find an alternate means for secure key generation.
//!
//! Produce a proof for a message from a `VRFPrivateKey`, and verify the proof and message
//! using a `VRFPublicKey`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! let proof = private_key.prove(message);
//! assert!(public_key.verify(&proof, message).is_ok());
//! ```
//!
//! Produce a pseudorandom output from a `Proof`:
//!
//! ```
//! # use nextgen_crypto::{traits::Uniform, vrf::ecvrf::*};
//! # use rand::{rngs::StdRng, SeedableRng};
//! # let message = b"Test message";
//! # let mut rng: StdRng = SeedableRng::from_seed([0; 32]);
//! # let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
//! # let public_key: VRFPublicKey = (&private_key).into();
//! # let proof = private_key.prove(message);
//! let output: Output = (&proof).into();
//! ```
use crate::traits::*;
use core::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{CompressedEdwardsY, EdwardsPoint},
scalar::Scalar as ed25519_Scalar,
};
use derive_deref::Deref;
use ed25519_dalek::{
self, Digest, PublicKey as ed25519_PublicKey, SecretKey as ed25519_PrivateKey, Sha512,
};
use failure::prelude::*;
use serde::{Deserialize, Serialize};
const SUITE: u8 = 0x03;
const ONE: u8 = 0x01;
const TWO: u8 = 0x02;
const THREE: u8 = 0x03;
/// The number of bytes of [`Output`]
pub const OUTPUT_LENGTH: usize = 64;
/// The number of bytes of [`Proof`]
pub const PROOF_LENGTH: usize = 80;
/// An ECVRF private key
#[derive(Serialize, Deserialize, Deref, Debug)]
pub struct VRFPrivateKey(ed25519_PrivateKey);
/// An ECVRF public key
#[derive(Serialize, Deserialize, Deref, Debug, PartialEq, Eq)]
pub struct VRFPublicKey(ed25519_PublicKey);
/// A longer private key which is slightly optimized for proof generation.
///
/// This is similar in structure to ed25519_dalek::ExpandedSecretKey. It can be produced from
/// a VRFPrivateKey.
pub struct VRFExpandedPrivateKey {
pub(super) key: ed25519_Scalar,
pub(super) nonce: [u8; 32],
}
impl VRFPrivateKey {
/// Produces a proof for an input (using the private key)
pub fn prove(&self, alpha: &[u8]) -> Proof {
VRFExpandedPrivateKey::from(self).prove(&VRFPublicKey((&self.0).into()), alpha)
}
}
impl VRFExpandedPrivateKey {
/// Produces a proof for an input (using the expanded private key)
pub fn prove(&self, pk: &VRFPublicKey, alpha: &[u8]) -> Proof {
let h_point = pk.hash_to_curve(alpha);
let k_scalar =
ed25519_Scalar::from_bytes_mod_order_wide(&nonce_generation_bytes(self.nonce, h_point));
let gamma = h_point * self.key;
let c_scalar = hash_points(&[
h_point,
gamma,
ED25519_BASEPOINT_POINT * k_scalar,
h_point * k_scalar,
]);
Proof {
gamma,
c: c_scalar,
s: k_scalar + c_scalar * self.key,
}
}
}
impl Uniform for VRFPrivateKey {
fn generate_for_testing<R>(rng: &mut R) -> Self
where
R: SeedableCryptoRng,
{
VRFPrivateKey(ed25519_PrivateKey::generate(rng))
}
}
impl TryFrom<&[u8]> for VRFPrivateKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPrivateKey, CryptoMaterialError> {
Ok(VRFPrivateKey(
ed25519_PrivateKey::from_bytes(bytes)
.map_err(|_| CryptoMaterialError::DeserializationError)?,
))
}
}
impl TryFrom<&[u8]> for VRFPublicKey {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<VRFPublicKey, CryptoMaterialError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut bits: [u8; 32] = [0u8; 32];
bits.copy_from_slice(&bytes[..32]);
let compressed = curve25519_dalek::edwards::CompressedEdwardsY(bits);
let point = compressed
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?;
// Check if the point lies on a small subgroup. This is required
// when using curves with a small cofactor (in ed25519, cofactor = 8).
if point.is_small_order() {
return Err(CryptoMaterialError::SmallSubgroupError);
}
Ok(VRFPublicKey(ed25519_PublicKey::from_bytes(bytes).unwrap()))
}
}
impl VRFPublicKey {
/// Given a [`Proof`] and an input, returns whether or not the proof is valid for the input
/// and public key
pub fn verify(&self, proof: &Proof, alpha: &[u8]) -> Result<()> {
let h_point = self.hash_to_curve(alpha);
let pk_point = CompressedEdwardsY::from_slice(self.as_bytes())
.decompress()
.unwrap();
let cprime = hash_points(&[
h_point,
proof.gamma,
ED25519_BASEPOINT_POINT * proof.s - pk_point * proof.c,
h_point * proof.s - proof.gamma * proof.c,
]);
if proof.c == cprime {
Ok(())
} else {
bail!("The proof failed to verify for this public key")
}
}
pub(super) fn hash_to_curve(&self, alpha: &[u8]) -> EdwardsPoint {
let mut result = [0u8; 32];
let mut counter = 0;
let mut wrapped_point: Option<EdwardsPoint> = None;
while wrapped_point.is_none() {
result.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, ONE])
.chain(self.as_bytes())
.chain(&alpha)
.chain(&[counter])
.result()[..32],
);
wrapped_point = CompressedEdwardsY::from_slice(&result).decompress();
counter += 1;
}
wrapped_point.unwrap().mul_by_cofactor()
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFPublicKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let secret: &ed25519_PrivateKey = private_key;
let public: ed25519_PublicKey = secret.into();
VRFPublicKey(public)
}
}
impl<'a> From<&'a VRFPrivateKey> for VRFExpandedPrivateKey {
fn from(private_key: &'a VRFPrivateKey) -> Self {
let mut h: Sha512 = Sha512::default();
let mut hash: [u8; 64] = [0u8; 64];
let mut lower: [u8; 32] = [0u8; 32];
let mut upper: [u8; 32] = [0u8; 32];
h.input(private_key.to_bytes());
hash.copy_from_slice(h.result().as_slice());
lower.copy_from_slice(&hash[0..32]);
upper.copy_from_slice(&hash[32..64]);
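// Standard ed25519 scalar clamping: clear the low three bits and the two top
// bits, then set bit 254.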
lower[0] &= 248;
lower[31] &= 63;
lower[31] |= 64;
VRFExpandedPrivateKey {
key: ed25519_Scalar::from_bits(lower),
nonce: upper,
}
}
}
/// A VRF proof that can be used to validate an input with a public key
pub struct Proof {
gamma: EdwardsPoint,
c: ed25519_Scalar,
s: ed25519_Scalar,
}
impl Proof {
/// Produces a new Proof struct from its fields
pub fn new(gamma: EdwardsPoint, c: ed25519_Scalar, s: ed25519_Scalar) -> Proof {
Proof { gamma, c, s }
}
/// Converts a Proof into bytes
pub fn to_bytes(&self) -> [u8; PROOF_LENGTH] {
let mut ret = [0u8; PROOF_LENGTH];
ret[..32].copy_from_slice(&self.gamma.compress().to_bytes()[..]);
ret[32..48].copy_from_slice(&self.c.to_bytes()[..16]);
ret[48..].copy_from_slice(&self.s.to_bytes()[..]);
ret
}
}
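// Wire format: gamma (32 bytes) || c (16 bytes) || s (32 bytes) = 80 bytes.
// Only the low 16 bytes of the challenge scalar are kept, matching the
// truncated challenge produced by `hash_points` below.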
impl TryFrom<&[u8]> for Proof {
type Error = CryptoMaterialError;
fn try_from(bytes: &[u8]) -> std::result::Result<Proof, CryptoMaterialError> {
// Reject inputs of the wrong length instead of panicking in copy_from_slice.
if bytes.len() != PROOF_LENGTH {
return Err(CryptoMaterialError::WrongLengthError);
}
let mut c_buf = [0u8; 32];
c_buf[..16].copy_from_slice(&bytes[32..48]);
let mut s_buf = [0u8; 32];
s_buf.copy_from_slice(&bytes[48..]);
Ok(Proof {
gamma: CompressedEdwardsY::from_slice(&bytes[..32])
.decompress()
.ok_or(CryptoMaterialError::DeserializationError)?,
c: ed25519_Scalar::from_bits(c_buf),
s: ed25519_Scalar::from_bits(s_buf),
})
}
}
/// The ECVRF output produced from the proof
pub struct Output([u8; OUTPUT_LENGTH]);
impl Output {
/// Converts an Output into bytes
#[inline]
pub fn to_bytes(&self) -> [u8; OUTPUT_LENGTH] {
self.0
}
}
impl<'a> From<&'a Proof> for Output {
fn from(proof: &'a Proof) -> Output {
let mut output = [0u8; OUTPUT_LENGTH];
output.copy_from_slice(
&Sha512::new()
.chain(&[SUITE, THREE])
.chain(&proof.gamma.mul_by_cofactor().compress().to_bytes()[..])
.result()[..],
);
Output(output)
}
}
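// The conversion above computes SHA512(SUITE || THREE || (cofactor * gamma)),
// i.e. the proof-to-hash step of the ECVRF draft: 64 pseudorandom bytes that
// any holder of a valid proof can derive deterministically.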
pub(super) fn nonce_generation_bytes(nonce: [u8; 32], h_point: EdwardsPoint) -> [u8; 64] {
let mut k_buf = [0u8; 64];
k_buf.copy_from_slice(
&Sha512::new()
.chain(nonce)
.chain(h_point.compress().as_bytes())
.result()[..],
);
k_buf
}
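// The nonce bytes above follow an RFC 8032-style deterministic scheme,
// SHA512(key-derived nonce || compressed H), reduced mod the group order at
// the call site. Below, the challenge is SHA512(SUITE || TWO || points...)
// truncated to its low 16 bytes and zero-extended into a scalar.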
pub(super) fn hash_points(points: &[EdwardsPoint]) -> ed25519_Scalar {
let mut result = [0u8; 32];
let mut hash = Sha512::new().chain(&[SUITE, TWO]);
for point in points.iter() {
hash = hash.chain(point.compress().to_bytes());
}
result[..16].copy_from_slice(&hash.result()[..16]);
ed25519_Scalar::from_bits(result)
}
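// --- Added sketch, not part of the original module: a minimal round-trip
// test for prove/verify, proof serialization, and output derivation. It
// assumes the same `rand` dev-dependency used by the doc examples; the module
// and test names are illustrative.
#[cfg(test)]
mod round_trip_sketch {
    use super::*;
    use crate::traits::Uniform;
    use core::convert::TryFrom;
    use rand::{rngs::StdRng, SeedableRng};

    #[test]
    fn prove_verify_serialize_round_trip() {
        let mut rng: StdRng = SeedableRng::from_seed([7u8; 32]);
        let private_key = VRFPrivateKey::generate_for_testing(&mut rng);
        let public_key: VRFPublicKey = (&private_key).into();

        // A proof over an input verifies under the matching public key.
        let proof = private_key.prove(b"test input");
        assert!(public_key.verify(&proof, b"test input").is_ok());

        // An 80-byte serialized proof re-parses and still verifies.
        let bytes = proof.to_bytes();
        let parsed = Proof::try_from(&bytes[..]).expect("round-trip parse");
        assert!(public_key.verify(&parsed, b"test input").is_ok());

        // The 64-byte pseudorandom output is a deterministic function of gamma.
        let out1: Output = (&proof).into();
        let out2: Output = (&parsed).into();
        assert_eq!(out1.to_bytes().to_vec(), out2.to_bytes().to_vec());
    }
}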
|
parser.rs

use crate::Result;
use pom::char_class::{alpha, alphanum, multispace};
use pom::parser::*;
use std::str::FromStr;
use std::fmt::{Display, Formatter};
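// FSML surface syntax accepted by `state_machine()` below (reconstructed from
// the combinators and tests in this file; illustrative, not a formal spec):
//
//   machine foo;                    // header: machine name
//   state bar "it's a bar thing";   // states, with an optional description;
//   state baz;                      // the first state listed is the start state
//   bar -> baz;                     // accept-state chains
//
// Line comments (//...), block comments (/* ... */), and arbitrary whitespace
// are allowed between tokens.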
#[derive(Debug, Clone, PartialEq)]
pub struct StateMachine {
pub name: String,
pub states: Vec<State>,
pub accept_states: Vec<AcceptState>
}
#[derive(Debug, Clone, PartialEq)]
pub struct AcceptState(StateId, StateId);
#[derive(Debug, Clone, PartialEq)]
pub struct StateId(String);
#[derive(Debug, Clone, PartialEq)]
pub struct State {
pub id: StateId,
pub is_starting_state: bool,
pub description: Option<String>
}
impl AcceptState {
pub fn source(&self) -> &StateId {
&self.0
}
pub fn target(&self) -> &StateId {
&self.1
}
}
impl Display for StateId {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
/// space, tab, etc
fn ws<'a>() -> Parser<'a, u8, ()> {
is_a(multispace).discard()
}
/// whitespace and comments
fn space<'a>() -> Parser<'a, u8, ()> {
(ws() | comment()).repeat(0..).discard()
}
fn semi<'a>() -> Parser<'a, u8, ()> {
keyword(b";").name("semi")
}
fn to_eol<'a>() -> Parser<'a, u8, String> {
fn anything_else(term: u8) -> bool {
!is_cr(term) && !is_lf(term)
}
is_a(anything_else)
.repeat(0..)
.map(|u8s| String::from_utf8(u8s).expect("can only parse utf"))
}
fn line_comment<'a>() -> Parser<'a, u8, ()> {
(seq(b"//") * to_eol() - eol())
.discard()
.name("line comment")
}
fn eol<'a>() -> Parser<'a, u8, ()> {
((is_a(is_cr) * is_a(is_lf)) | is_a(is_lf) | is_a(is_cr)).discard()
}
fn keyword<'a>(keyword: &'static [u8]) -> Parser<'a, u8, ()> {
literal(keyword).discard().name("keyword")
}
fn literal<'a>(literal: &'static [u8]) -> Parser<'a, u8, String> {
spaced(seq(literal))
.map(|u8s| String::from_utf8(u8s.to_vec()).expect("can only parse utf"))
.name("literal")
}
fn star_comment<'a>() -> Parser<'a, u8, ()> {
fn anything_else(term: u8) -> bool {
term != b'*'
}
(seq(b"/*") * is_a(anything_else).repeat(0..) - seq(b"*/")).discard()
}
fn comment<'a>() -> Parser<'a, u8, ()> {
line_comment() | star_comment()
}
/// a parser wrapped in whitespace
fn spaced<'a, T>(parser: Parser<'a, u8, T>) -> Parser<'a, u8, T>
where
T: 'a,
{
space() * parser - space()
}
fn is_cr(term: u8) -> bool {
term == b'\r'
}
fn is_lf(term: u8) -> bool {
term == b'\n'
}
fn is_underscore(term: u8) -> bool {
term == b'_'
}
fn state_id<'a>() -> Parser<'a, u8, StateId> {
identifier().map(StateId)
}
fn identifier<'a>() -> Parser<'a, u8, String> {
let it = ((is_a(alpha) | is_a(is_underscore))
+ (is_a(alphanum) | is_a(is_underscore)).repeat(0..))
.map(|(first, rest)| format!("{}{}", first as char, String::from_utf8(rest).unwrap()));
spaced(it).name("name")
}
fn string<'a>() -> Parser<'a, u8, String> {
let special_char = sym(b'\\')
| sym(b'/')
| sym(b'"')
| sym(b'b').map(|_| b'\x08')
| sym(b'f').map(|_| b'\x0C')
| sym(b'n').map(|_| b'\n')
| sym(b'r').map(|_| b'\r')
| sym(b't').map(|_| b'\t');
let escape_sequence = sym(b'\\') * special_char;
let string = sym(b'"') * (none_of(b"\\\"") | escape_sequence).repeat(0..) - sym(b'"');
string.convert(String::from_utf8)
}
fn state<'a>() -> Parser<'a, u8, State> {
let raw = keyword(b"state") * identifier() + string().opt()
- semi();
raw.map(move |(identifier, description)| State {
id: StateId(identifier),
is_starting_state: false,
description
})
}
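/// Zero or more `state` declarations; the first state parsed is tagged as the
/// machine's starting state.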
fn state_list<'a>() -> Parser<'a, u8, Vec<State>> {
fn tag_starting_state(idx: usize, state: State) -> State {
State {
is_starting_state: idx == 0,
..state
}
};
state().repeat(0..).map(|states| states.into_iter().enumerate().map(|(idx, state)| tag_starting_state(idx, state)).collect())
}
fn accept_states_list<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
accept_states_chain()
.repeat(0..)
.map(|chains| chains.into_iter().flatten().collect())
}
fn accept_states_chain<'a>() -> Parser<'a, u8, Vec<AcceptState>> {
let raw = spaced(list(spaced(state_id()), keyword(b"->"))) - semi();
raw.map(move |state_ids| {
if state_ids.len() < 2 {
return vec![];
}
let mut result = vec![];
for i in 0..state_ids.len() - 1 {
let left = state_ids[i].clone();
let right = state_ids[i+1].clone();
let accept = AcceptState(left, right);
result.push(accept);
}
return result;
})
}
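// Example: `foo -> bar -> baz;` yields the adjacent pairs (foo, bar) and
// (bar, baz); a single-state "chain" yields nothing, per the length check
// above.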
pub fn state_machine<'a>() -> Parser<'a, u8, StateMachine> {
let header = keyword(b"machine") * identifier() - semi();
let raw = header
+ state_list()
+ accept_states_list();
raw.map(move |((name, states), accept_states)| StateMachine {
name,
states,
accept_states
})
}
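// Usage sketch (hypothetical snippet mirroring the tests below): pom parsers
// are driven with `.parse(..)` over a byte slice.
//
//     let machine = state_machine()
//         .parse(b"machine m; state a; state b; a -> b;")
//         .expect("well-formed FSML parses");
//     assert_eq!(machine.name, "m");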
#[cfg(test)]
mod test {
use super::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::{fs, io};
macro_rules! assert_consumes_all {
( $ parser: expr, $input: expr ) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
if let Err(_) = res {
panic!("parser failed to match and consume everything")
}
};
( $ parser: expr, $input: expr, $expected: expr) => {
let terminating_parser = $parser - space() - end();
let res = terminating_parser.parse($input);
match res {
Ok(answer) => {
// it parsed, but was it right?
assert_eq!(answer, $expected)
}
Err(_) => {
//
panic!("parser failed to match and consume everything")
}
}
};
}
#[test]
fn parse_keywords() -> Result<()> {
assert_consumes_all![eol(), b"\r"];
assert_consumes_all![eol(), b"\r\n"];
assert_consumes_all![eol(), b"\n"];
assert_consumes_all![space(), b""];
assert_consumes_all![space(), b" "];
assert_consumes_all![space(), b" \t \n \r "];
assert_consumes_all![line_comment(), b"//\r"];
assert_consumes_all![line_comment(), b"//\n"];
assert_consumes_all![line_comment(), b"//\r\n"];
assert_consumes_all![line_comment(), b"// xyz \r\n"];
assert_consumes_all![star_comment(), b"/* thing */"];
assert_consumes_all![star_comment(), b"/* thing \r\n thing */"];
assert_consumes_all!(
identifier(),
b"foo"
);
assert_consumes_all!(
state_id(),
b"foo"
);
assert_consumes_all!(
accept_states_chain(),
b"foo-> bar -> baz;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
]
);
assert_consumes_all!(
accept_states_list(),
b"foo-> bar -> baz; baz -> quux;",
vec![
AcceptState(StateId("foo".into()), StateId("bar".into())),
AcceptState(StateId("bar".into()), StateId("baz".into())),
AcceptState(StateId("baz".into()), StateId("quux".into())),
]
);
Ok(())
}
#[test]
fn parse_state_machines() -> Result<()> {
let emptymachine = StateMachine {
name: "foo".into(),
states: Default::default(),
accept_states: vec![]
};
assert_consumes_all!(
state_machine(),
b"machine foo;",
emptymachine
);
assert_consumes_all!(
state_machine(),
b"
machine foo;
state bar \"it's a bar thing\";
state baz;
bar -> baz;
",
StateMachine {
name: "foo".into(),
states: vec![
State {
id: StateId("bar".into()),
is_starting_state: true,
description: Some("it's a bar thing".into())
},
State {
id: StateId("baz".into()),
is_starting_state: false,
description: None
},
],
accept_states: vec![
AcceptState(StateId("bar".into()), StateId("baz".into()))
]
}
);
Ok(())
}
fn count_lines(byte_slice: &[u8]) -> usize {
let line_parser = (to_eol() - eol()).repeat(0..);
let parse_result = line_parser.parse(byte_slice).unwrap();
parse_result.len()
}
#[test]
fn line_counter_works() {
let file_path_str = "assets/fsml/simple-state-machine.fsml";
let byte_vec: Vec<u8> = std::fs::read(file_path_str).unwrap();
let actual = count_lines(&byte_vec);
assert_eq!(12, actual);
}
#[test]
fn parse_state_machine_file() {
let file_path_str = "assets/fsml/simple-state-machine.fsml";
assert_parse_file(PathBuf::from_str(file_path_str).unwrap().as_path());
}
#[test]
fn parse_all_files() -> Result<()> {
let mut entries = fs::read_dir("assets/fsml")?
.map(|res| res.map(|e| e.path()))
//.filter(|f| )
.collect::<std::result::Result<Vec<_>, io::Error>>()?;
entries.sort();
for file_path_str in entries {
println!("");
println!("{}", file_path_str.to_str().unwrap());
println!("");
assert_parse_file(file_path_str.as_path());
}
Ok(())
}
fn assert_parse_file(file_path_str: &Path) {
let byte_vec: Vec<u8> = std::fs::read(file_path_str).unwrap();
let file_content =
String::from_utf8(byte_vec.clone()).expect("should be able to read the file");
let byte_slice: &[u8] = &byte_vec;
let parser = state_machine();
let parse_result = match parser.parse(byte_slice) {
Ok(parse_result) => parse_result,
Err(pom::Error::Mismatch { message, position }) => {
let start_str = &byte_vec[0..position];
let line = count_lines(start_str) + 1;
let end = min(position + 50, file_content.len() - 1);
let extract = &file_content[position..end];
let extract = extract
.to_string()
.replace("\n", "\\n")
.replace("\r", "\\r")
.replace("\t", "\\t");
let err_location = format!("{}:{}:{}", file_path_str.to_str().unwrap(), line, 1);
// thread 'idl_parser::test::parse_full_html5_file' panicked at 'whoops', src/idl_parser.rs:428:9
let better_message = format!(
"thread 'idl_parser::test::parse_full_html5_file' panicked at 'parsing', {}\n\n{}",
err_location, extract
);
println!("{}", better_message);
panic!(message)
}
Err(e) => panic!("{}", e),
};
println!("{:?}", parse_result);
}
}
lib.rs | // (A,X,E,R,S)
//
// Lizzie Borden took an axe
// And gave her mother forty whacks.
// When she saw what she had done,
// She gave her father forty-one.
//
#![feature(struct_variant)]
#![allow(dead_code)]
#![allow(uppercase_variables)]
#![allow(unused_variable)]
#![allow(unused_imports)]
//#![allow(visible_private_types)]
use std::collections::hashmap::HashMap;
mod r0;
mod r1;
// scheme kinda source-language
#[deriving(Clone)]
pub enum CoreLanguage {
// <core> → <object>
// <core> → <variable>
// <core> → (quote <object>)
// <core> → (lambda (<variable>... ) <core>)
// <core> → (if <core> <core> <core>)
// <core> → (set! <variable> <core>)
// <core> → (call/cc <core>)
// <core> → (<core> <core>... )
Object(Obj),
Variable(String),
Quote(Core),
Lambda(Vec<String>, Core),
If(Core, Core, Core),
Set(String, Core),
CallCC(Core),
List(Vec<Core>)
}
pub type Core = Box<CoreLanguage>;
#[deriving(Clone)]
pub enum Obj {
ONil,
OBool(bool),
OInt(i32),
OFloat(f32),
OStr(String),
OClosure(Closure)
}
//(define compile
// (lambda (x next)
// (cond
// [(symbol? x)
// (list 'refer x next)]
// [(pair? x)
// (record-case x
// [quote (obj)
// (list 'constant obj next)]
// [lambda (vars body)
// (list 'close vars (compile body '(return)) next)]
// [if (test then else)
// (let ([thenc (compile then next)]
// [elsec (compile else next)])
// (compile test (list 'test thenc elsec)))]
// [set! (var x)
// (compile x (list 'assign var next))]
// [call/cc (x)
// (let ([c (list 'conti
// (list 'argument
// (compile x '(apply))))])
// (if (tail? next)
// c
// (list 'frame next c)))]
// [else
// (recur loop ([args (cdr x)]
// [c (compile (car x) '(apply))])
// (if (null? args)
// (if (tail? next)
// c
// (list 'frame next c))
// (loop (cdr args)
// (compile (car args)
// (list 'argument c)))))])]
// [else
// (list 'constant x next)])))
pub fn compile(x: CoreLanguage, next: Code) -> Code {
match x {
Variable(str) => {
box REFER{var:str, k:next}
},
Quote(obj) => {
box CONSTANT{obj:ONil, k:next}
},
Lambda(vars, body) => {
box CLOSE{ vars:vars, body:compile(*body, box RETURN{unused:true}), k:next }
},
If(test, seq, alt) => {
let thenc = compile(*seq, next.clone());
let elsec = compile(*alt, next.clone());
compile(*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
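// A minimal sketch of the instruction graphs compile() emits; the demo_
// name is hypothetical, everything else is defined in this file. The
// expression (set! n 42) becomes CONSTANT -> ASSIGN -> HALT.
fn demo_instruction_graph() -> Code {
    box CONSTANT {
        obj: OInt(42),
        k: box ASSIGN { var: "n".to_string(), k: box HALT { unused: true } }
    }
}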
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
    let ix_opt = self.vars.iter().position(|v| { v == var });
    match ix_opt {
        Some(ix) => Some(self.vals[ix].clone()),
        // fall back to the enclosing scope so lexical lookup actually
        // walks the parent chain built by extend()
        None => match self.parent {
            Some(ref p) => p.get(var),
            None => None
        }
    }
}
fn set(&mut self, var: &String, val: Obj) {
| let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => { self.vars.push(var.clone()); self.vals.push(val) } // record the name too, or get() can never resolve it
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
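// A short usage sketch of the Scope API above (the demo_ name is
// hypothetical): set() binds or updates, get() resolves through the parent
// chain, and extend() builds the child scope APPLY enters a closure with.
fn demo_scope() {
    let mut outer = Scope::new(None);
    outer.set(&"x".to_string(), OInt(1));
    let inner = outer.extend(vec!("y".to_string()), vec!(OInt(2)));
    let _local = inner.get(&"y".to_string());   // Some(OInt(2))
    let _lexical = inner.get(&"x".to_string()); // Some(OInt(1)) via the parent link
}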
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (ie. ret is index into code of caller)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
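// Sketch (hypothetical demo_ helper): a hand-built closure equivalent to
// (lambda (a) a); the APPLY arm of Machine::step extends `env` with `params`.
fn demo_identity_closure() -> Closure {
    let body = box REFER { var: "a".to_string(), k: box RETURN { unused: true } };
    Closure::make(vec!("a".to_string()), Scope::new(None), body)
}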
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context,...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>,
// control stack (ptr to top call frame; frames have link to prev frame)
S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
//...
//...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
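// How the pieces are wired together (the demo_ name and setup are
// hypothetical): compile a Core expression into a code graph, seed the
// five registers, and step the machine until it reaches HALT.
fn demo_run(prog: CoreLanguage) -> Option<Obj> {
    let code = compile(prog, box HALT { unused: true });
    let halt_frame = Frame::make(Scope::new(None), vec!(), box HALT { unused: true }, None);
    let state = VMState::make(ONil, code, Scope::new(None), vec!(), halt_frame);
    let mut machine = Machine::init(state);
    machine.run()
}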
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut A,X,mut E,mut R,mut S) = (
self.state.A.clone(),
self.state.X.clone(),
self.state.E.clone(),
self.state.R.clone(),
self.state.S.clone()
);
let x = match *X {
// case HALT : return // and return A
HALT {..} => {
box HALT {unused:true}
},
// case REFER : I: REFER ; A = E[I.var]; X = I.next
REFER {var:ref var, k:ref k} => {
A = E.get(var).expect("yowza");
k.clone()
},
// case CONSTANT: I: CONSTANT; A = I.obj; X = I.next
CONSTANT {obj:ref obj, k:ref k} => {
A = obj.clone();
k.clone()
},
// case CLOSE : I: CLOSE ; A = Closure(I.vars, E, I.body); X = I.next
CLOSE {vars:ref vars, body:ref body, k:ref k} => {
let a = Closure { params:vars.clone(), env:E.clone(), body:body.clone() };
A = OClosure(a);
k.clone()
},
// case TEST : I: TEST ; X = (A == true)? I.thenc : I.elsec
TEST {kthen:ref kthen, kelse:ref kelse} => {
let k = //if A == true { kthen } else { kelse };
match A {
OBool(true) => { kthen },
//OBool(false) => { kelse },
_ => { kelse }
};
k.clone()
},
// case ASSIGN : I: ASSIGN ; E[I.var] = A; X = I.next
ASSIGN {var:ref var, k:ref k} => {
E.set(var, A.clone());
k.clone()
},
// case CONTI : I: CONTI ; A = capture_cc(S); X = I.next
CONTI {k:ref k} => {
let a = Machine::capture_cc(&S);
A = OClosure(a);
k.clone()
},
// case NUATE : I: NUATE ; A = E[I.var]; X = RETURN;
NUATE {s:ref s, var:ref var} => {
A = E.get(var).expect("yup");
box RETURN {unused:true}
},
// case FRAME : I: FRAME ; S = Frame(E, R, I.ret, S); R = [,]; X = I.next
FRAME {k:ref k, ret:ref ret} => {
let s = Frame {
ret: ret.clone(),
bindings: E.clone(),
valueRib: R.clone(),
caller:Some(box S.clone())
};
S = s;
R = vec!();
k.clone()
},
// case ARGUMENT: I: ARGUMENT; R.add(A); X = I.next
ARGUMENT {k:ref k} => {
R.push(A.clone());
k.clone()
},
// case APPLY : I: APPLY ; closure := (AxonClosure) A
// vals := R
// vars := closure.params
// E = closure.env.extend(vars, vals)
// R = [,]
// X = closure.body
APPLY {..} => {
let closure = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("yo! no clo")
};
let vals = R;
R = vec!();
let vars = closure.params.clone();
E = closure.env.extend(vars, vals);
closure.body
},
// case INVOKE : I: INVOKE ; obj := A
// // meth := obj.typeof.slot[I.method]
// args := (Obj?[]) R
// // A = meth.invoke(obj, args)
// R = [,]
// X = I.next
INVOKE {method:ref method, k:ref code} => {
let f = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("no clo no mo")
};
let args = R;
R = vec!();
//TODO: A = (f)(args);
code.clone()
},
// case RETURN : I: RETURN ; X = S.ret; E = S.bindings; R = S.valueRib; S = S.caller
RETURN {..} => {
let x = S.ret;
E = S.bindings;
R = S.valueRib;
S = *S.caller.expect("DCM,ICU");
x
},
};
let retval = A.clone();
self.state = VMState {
A:A,
X:x, // advance to the instruction chosen by the match above
E:E,
R:R,
S:S
};
//notifyObservers
Some(retval)
}
fn done(&self) -> bool {
match *self.state.X {
HALT {..} => { true },
_ => { false }
}
}
fn run(&mut self) -> Option<Obj> {
loop {
let retval = self.step();
if self.done() {
return retval;
}
}
}
/// a continuation is a closure that in addition has access to the frame
/// in which it was created (where call/cc was called).
/// the body of a continuation closure, when executed, restores the
/// saved frame (which includes its calling frames) (pg. 50)
///
/// a continuation generates a closure that captures
/// the current control stack; the body of the generated
/// closure is an instruction that will restore the
/// captured stack.
fn capture_cc(s: &Frame) -> Closure {
let v = "__V__";
let body = box NUATE{ s:s.clone(), var:v.to_string() };
let env = Scope::new(None);
let vars = vec!(v.to_string());
Closure { params:vars, env:env, body:body }
}
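// Sketch (hypothetical demo_ helper): compiling (call/cc f) produces the
// FRAME/CONTI/ARGUMENT/APPLY chain described above; capture_cc itself
// only runs later, when the CONTI arm of step() is executed.
fn demo_callcc_code() -> Code {
    compile(CallCC(box Variable("f".to_string())), box HALT { unused: true })
}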
} | identifier_name |
|
lib.rs | // (A,X,E,R,S)
//
// Lizzie Borden took an axe
// And gave her mother forty whacks.
// When she saw what she had done,
// She gave her father forty-one.
//
#![feature(struct_variant)]
#![allow(dead_code)]
#![allow(uppercase_variables)]
#![allow(unused_variable)]
#![allow(unused_imports)]
//#![allow(visible_private_types)]
use std::collections::hashmap::HashMap;
mod r0;
mod r1;
// scheme kinda source-language
#[deriving(Clone)]
pub enum CoreLanguage {
// <core> → <object>
// <core> → <variable>
// <core> → (quote <object>)
// <core> → (lambda (<variable>... ) <core>)
// <core> → (if <core> <core> <core>)
// <core> → (set! <variable> <core>)
// <core> → (call/cc <core>)
// <core> → (<core> <core>... )
Object(Obj),
Variable(String),
Quote(Core),
Lambda(Vec<String>, Core),
If(Core, Core, Core),
Set(String, Core),
CallCC(Core),
List(Vec<Core>)
}
pub type Core = Box<CoreLanguage>;
#[deriving(Clone)]
pub enum Obj {
ONil,
OBool(bool),
OInt(i32),
OFloat(f32),
OStr(String),
OClosure(Closure)
}
//(define compile
// (lambda (x next)
// (cond
// [(symbol? x)
// (list 'refer x next)]
// [(pair? x)
// (record-case x
// [quote (obj)
// (list 'constant obj next)]
// [lambda (vars body)
// (list 'close vars (compile body '(return)) next)]
// [if (test then else)
// (let ([thenc (compile then next)]
// [elsec (compile else next)])
// (compile test (list 'test thenc elsec)))]
// [set! (var x)
// (compile x (list 'assign var next))]
// [call/cc (x)
// (let ([c (list 'conti
// (list 'argument
// (compile x '(apply))))])
// (if (tail? next)
// c
// (list 'frame next c)))]
// [else
// (recur loop ([args (cdr x)]
// [c (compile (car x) '(apply))])
// (if (null? args)
// (if (tail? next)
// c
// (list 'frame next c))
// (loop (cdr args)
// (compile (car args)
// (list 'argument c)))))])]
// [else
// (list 'constant x next)])))
pub fn compile(x: CoreLanguage, next: Code) -> Code {
match x {
Variable(str) => {
box REFER{var:str, k:next}
},
Quote(obj) => {
box CONSTANT{obj:ONil, k:next}
},
Lambda(vars, body) => {
box CLOSE{ vars:vars, body:compile(*body, box RETURN{unused:true}), k:next }
},
If(test, seq, alt) => {
let thenc = compile(*seq, next.clone());
let elsec = compile(*alt, next.clone());
compile(*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
    let ix_opt = self.vars.iter().position(|v| { v == var });
    match ix_opt {
        Some(ix) => Some(self.vals[ix].clone()),
        // fall back to the enclosing scope so lexical lookup actually
        // walks the parent chain built by extend()
        None => match self.parent {
            Some(ref p) => p.get(var),
            None => None
        }
    }
}
fn set(&mut self, var: &String, val: Obj) {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => { self.vars.push(var.clone()); self.vals.push(val) } // record the name too, or get() can never resolve it
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (ie. ret is index into code of caller)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
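// Sketch of the control-stack linkage (the demo_ name is hypothetical):
// the FRAME opcode pushes a Frame whose `caller` holds the previous one,
// and RETURN pops it again by following that link.
fn demo_frame_chain(outer: Frame) -> Frame {
    Frame::make(Scope::new(None), vec!(), box HALT { unused: true }, Some(box outer))
}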
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context,...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>,
// control stack (ptr to top call frame; frames have link to prev frame)
S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
//...
//...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut A,X,mut E,mut R,mut S) = (
self.state.A.clone(),
self.state.X.clone(),
self.state.E.clone(),
self.state.R.clone(),
self.state.S.clone()
);
let x = match *X {
// case HALT : return // and return A
HALT {..} => {
box HALT {unused:true}
},
// case REFER : I: REFER ; A = E[I.var]; X = I.next
REFER {var:ref var, k:ref k} => {
A = E.get(var).expect("yowza");
k.clone()
},
// case CONSTANT: I: CONSTANT; A = I.obj; X = I.next
CONSTANT {obj:ref obj, k:ref k} => {
A = obj.clone();
k.clone()
},
// | e(I.vars, E, I.body); X = I.next
CLOSE {vars:ref vars, body:ref body, k:ref k} => {
let a = Closure { params:vars.clone(), env:E.clone(), body:body.clone() };
A = OClosure(a);
k.clone()
},
// case TEST : I: TEST ; X = (A == true)? I.thenc : I.elsec
TEST {kthen:ref kthen, kelse:ref kelse} => {
let k = //if A == true { kthen } else { kelse };
match A {
OBool(true) => { kthen },
//OBool(false) => { kelse },
_ => { kelse }
};
k.clone()
},
// case ASSIGN : I: ASSIGN ; E[I.var] = A; X = I.next
ASSIGN {var:ref var, k:ref k} => {
E.set(var, A.clone());
k.clone()
},
// case CONTI : I: CONTI ; A = capture_cc(S); X = I.next
CONTI {k:ref k} => {
let a = Machine::capture_cc(&S);
A = OClosure(a);
k.clone()
},
// case NUATE : I: NUATE ; A = E[I.var]; X = RETURN;
NUATE {s:ref s, var:ref var} => {
A = E.get(var).expect("yup");
box RETURN {unused:true}
},
// case FRAME : I: FRAME ; S = Frame(E, R, I.ret, S); R = [,]; X = I.next
FRAME {k:ref k, ret:ref ret} => {
let s = Frame {
ret: ret.clone(),
bindings: E.clone(),
valueRib: R.clone(),
caller:Some(box S.clone())
};
S = s;
R = vec!();
k.clone()
},
// case ARGUMENT: I: ARGUMENT; R.add(A); X = I.next
ARGUMENT {k:ref k} => {
R.push(A.clone());
k.clone()
},
// case APPLY : I: APPLY ; closure := (AxonClosure) A
// vals := R
// vars := closure.params
// E = closure.env.extend(vars, vals)
// R = [,]
// X = closure.body
APPLY {..} => {
let closure = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("yo! no clo")
};
let vals = R;
R = vec!();
let vars = closure.params.clone();
E = closure.env.extend(vars, vals);
closure.body
},
// case INVOKE : I: INVOKE ; obj := A
// // meth := obj.typeof.slot[I.method]
// args := (Obj?[]) R
// // A = meth.invoke(obj, args)
// R = [,]
// X = I.next
INVOKE {method:ref method, k:ref code} => {
let f = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("no clo no mo")
};
let args = R;
R = vec!();
//TODO: A = (f)(args);
code.clone()
},
// case RETURN : I: RETURN ; X = S.ret; E = S.bindings; R = S.valueRib; S = S.caller
RETURN {..} => {
let x = S.ret;
E = S.bindings;
R = S.valueRib;
S = *S.caller.expect("DCM,ICU");
x
},
};
let retval = A.clone();
self.state = VMState {
A:A,
X:x, // advance to the instruction chosen by the match above
E:E,
R:R,
S:S
};
//notifyObservers
Some(retval)
}
fn done(&self) -> bool {
match *self.state.X {
HALT {..} => { true },
_ => { false }
}
}
fn run(&mut self) -> Option<Obj> {
loop {
let retval = self.step();
if self.done() {
return retval;
}
}
}
/// a continuation is a closure that in addition has access to the frame
/// in which it was created (where call/cc was called).
/// the body of a continuation closure, when executed, restores the
/// saved frame (which includes its calling frames) (pg. 50)
///
/// a continuation generates a closure that captures
/// the current control stack; the body of the generated
/// closure is an instruction that will restore the
/// captured stack.
fn capture_cc(s: &Frame) -> Closure {
let v = "__V__";
let body = box NUATE{ s:s.clone(), var:v.to_string() };
let env = Scope::new(None);
let vars = vec!(v.to_string());
Closure { params:vars, env:env, body:body }
}
} | case CLOSE : I: CLOSE ; A = Closur | conditional_block |
lib.rs | // (A,X,E,R,S)
//
// Lizzie Borden took an axe
// And gave her mother forty whacks.
// When she saw what she had done,
// She gave her father forty-one.
//
#![feature(struct_variant)]
#![allow(dead_code)]
#![allow(uppercase_variables)]
#![allow(unused_variable)]
#![allow(unused_imports)]
//#![allow(visible_private_types)]
use std::collections::hashmap::HashMap;
mod r0;
mod r1;
// scheme kinda source-language
#[deriving(Clone)]
pub enum CoreLanguage {
// <core> → <object>
// <core> → <variable>
// <core> → (quote <object>)
// <core> → (lambda (<variable>... ) <core>)
// <core> → (if <core> <core> <core>)
// <core> → (set! <variable> <core>)
// <core> → (call/cc <core>)
// <core> → (<core> <core>... )
Object(Obj),
Variable(String),
Quote(Core),
Lambda(Vec<String>, Core),
If(Core, Core, Core),
Set(String, Core),
CallCC(Core),
List(Vec<Core>)
}
pub type Core = Box<CoreLanguage>;
#[deriving(Clone)]
pub enum Obj {
ONil,
OBool(bool),
OInt(i32),
OFloat(f32),
OStr(String),
OClosure(Closure)
}
//(define compile
// (lambda (x next)
// (cond
// [(symbol? x)
// (list 'refer x next)]
// [(pair? x)
// (record-case x
// [quote (obj)
// (list 'constant obj next)]
// [lambda (vars body)
// (list 'close vars (compile body '(return)) next)]
// [if (test then else)
// (let ([thenc (compile then next)]
// [elsec (compile else next)])
// (compile test (list 'test thenc elsec)))]
// [set! (var x)
// (compile x (list 'assign var next))]
// [call/cc (x)
// (let ([c (list 'conti
// (list 'argument
// (compile x '(apply))))])
// (if (tail? next)
// c
// (list 'frame next c)))]
// [else
// (recur loop ([args (cdr x)]
// [c (compile (car x) '(apply))])
// (if (null? args)
// (if (tail? next)
// c
// (list 'frame next c))
// (loop (cdr args)
// (compile (car args)
// (list 'argument c)))))])]
// [else
// (list 'constant x next)])))
pub fn compile(x: CoreLanguage, next: Code) -> Code {
match x {
Variable(str) => {
box REFER{var:str, k:next}
},
Quote(obj) => {
box CONSTANT{obj:ONil, k:next}
},
Lambda(vars, body) => {
box CLOSE{ vars:vars, body:compile(*body, box RETURN{unused:true}), k:next }
},
If(test, seq, alt) => {
let thenc = compile(*seq, next.clone());
let elsec = compile(*alt, next.clone());
compile(*test, box TEST{kthen:thenc, kelse:elsec})
},
Set(var, x) => {
compile(*x, box ASSIGN{var:var, k:next} )
},
CallCC(x) => {
let c = box CONTI{
k: box ARGUMENT{ k:compile(*x, box APPLY{unused:true}) }
};
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
},
List(x) => {
let args = x.slice_from(1);
let mut c = compile((*x[0]).clone(), box APPLY{unused:true});
for arg in args.iter() {
c = compile((**arg).clone(), box ARGUMENT{k:c});
}
if is_tail(&next) { c } else { box FRAME{k:next, ret:c} }
}
_ =>
{ box CONSTANT{obj:ONil /*x*/, k:next} }
}
}
fn is_tail(x: &Code) -> bool {
match **x {
RETURN{..} => true,
_ => false
}
}
///////////////////////////////////////////////////////////////////////////////
// Opcode
// these are a dozen primitive instructions that implement scheme-like
// semantics. This is applicative-order lambda calculus with lexically-scoped
// environments: everything reduces to function calls where arguments are
// evaluated before application of function to arguments; variables are
// bound in their static (lexical) scope like Scheme, not in their dynamic
// (runtime) scope like earlier Lisps.
// Execution model is heap-based and there is support for call-with-current-continuation
// so exception semantics can be implemented easily in terms of call/cc.
#[deriving(Clone)]
pub enum Opcode {
HALT {unused:bool},
REFER {var: String, k: Code},
CONSTANT {obj: Obj, k: Code},
CLOSE {vars: Vec<String>, body: Code, k: Code},
TEST {kthen: Code, kelse: Code},
ASSIGN {var: String, k: Code},
CONTI {k: Code},
NUATE {s: Frame, var: String},
FRAME {k: Code, ret: Code},
ARGUMENT {k: Code},
APPLY {unused:bool},
INVOKE {method: String, k: Code},
RETURN {unused:bool},
}
pub type Code = Box<Opcode>;
/// Scope is a dynamic environment: a set of bindings, implemented
/// as a map from variable names (as Str, representing symbols)
/// to runtime value (Obj? if not typing; or AxonVal derivatives)
#[deriving(Clone)]
struct Scope {
parent: Option<Box<Scope>>, // link to enclosing scope
//local: HashMap<String, Obj>// local vars (conceptually includes fn params)
vars: Vec<String>,
vals: Vec<Obj>
}
impl Scope
{
fn new(parent:Option<Box<Scope>>) -> Scope {
Scope { parent:parent, vars:vec!(), vals:vec!() }
}
fn get(&self, var: &String) -> Option<Obj> {
    let ix_opt = self.vars.iter().position(|v| { v == var });
    match ix_opt {
        Some(ix) => Some(self.vals[ix].clone()),
        // fall back to the enclosing scope so lexical lookup actually
        // walks the parent chain built by extend()
        None => match self.parent {
            Some(ref p) => p.get(var),
            None => None
        }
    }
}
fn set(&mut self, var: &String, val: Obj) {
let ix_opt = self.vars.iter().position(|v| { v == var });
match ix_opt {
Some(ix) => { *self.vals.get_mut(ix) = val },
None => { self.vars.push(var.clone()); self.vals.push(val) } // record the name too, or get() can never resolve it
};
}
fn extend(&self, vars: Vec<String>, vals: Vec<Obj>) -> Scope {
Scope{
parent: Some(box self.clone()),
vars: vars,
vals: vals
}
}
}
/// Frame is the dynamic (runtime) representation of a function
/// execution. It captures the caller-frame and return-address,
/// so the complete dynamic context can be traced by walking back
/// thru the caller links; a bindings context (which pushes and
/// pops local scopes for variable definitions); the AST for the
/// function's code; and the instruction-pointer which indicates
/// the current point of execution in the code.
#[deriving(Clone)]
struct Frame {
// *X* when this frame returns, exec. resumes from caller.code[ret] (ie. ret is index into code of caller)
ret: Code,
// *E* parms,locals
bindings: Scope,
// *R* accumulator of arg vals, to be combined w/ param names in extending env
valueRib: Vec<Obj>,
// *S* previous frame
caller: Option<Box<Frame>>,
//code: Code //belongs in Frame (there's a frame for every lambda definition)
}
impl Frame {
fn make(env:Scope, rib: Vec<Obj>, ret: Code, caller: Option<Box<Frame>>)
-> Frame
{
Frame { bindings:env, valueRib:rib, ret:ret, caller:caller }
}
}
/// closure captures the environment where it was created; when called,
/// it binds its params to actual-arg values (in left-to-right listed order)
/// and extends its environment with those bindings, and executes its
/// body with that extended environment.
#[deriving(Clone)]
pub struct Closure {
// names of parameters to be applied to closure
params: Vec<String>,
// static environment (lexical scope, captures scopes enclosing definition)
env: Scope,
// code implementing body of closure.
body: Code
}
impl Closure {
fn make(params: Vec<String>, env: Scope, body: Code) -> Closure {
Closure { params:params, env:env, body:body }
}
}
/// The VM below is fundamentally a state machine, of course, and
/// the five registers capture the entire current-state of that machine.
struct VMState
{
/////////////////////////////////////////////////////////////////////
// Machine Registers
// accumulator (most-recently-evaluated-expression value)
A: Obj,
// next instruction to be executed (source is compiled into a directed-graph of Opcode)
X: Code,
// current (lexical) environment (bindings map, context,...)
E: Scope,
// value rib (accumulator for values of arguments to a fn application)
R: Vec<Obj>, | S: Frame
}
impl VMState {
fn make(a:Obj, x:Code, e:Scope, r:Vec<Obj>, s:Frame) -> VMState {
VMState { A:a, X:x, E:e, R:r, S:s }
}
fn accumulator(&self) -> &Obj { &self.A }
fn program(&self) -> &Code { &self.X }
fn environment(&self) -> &Scope { &self.E }
fn arguments(&self) -> &Vec<Obj> { &self.R }
fn stackframe(&self) -> &Frame { &self.S }
}
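// Sketch (the demo_ name is hypothetical): the register file a fresh
// machine starts from — ONil in the accumulator, a compiled program in X,
// empty environment and rib, and a HALT frame at the bottom of the stack.
fn demo_initial_state(program: Code) -> VMState {
    let bottom = Frame::make(Scope::new(None), vec!(), box HALT { unused: true }, None);
    VMState::make(ONil, program, Scope::new(None), vec!(), bottom)
}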
///////////////////////////////////////////////////////////////////////////////
// axon machine: definition and implementation of virtual machine for
// scheme-like semantics
//
// let code be an in-memory graph (DAG) of instructions, where the entry-point
// to a sub-program is a single instruction.
// let instruction be a composition of:
// - opcode, an enumeration identifying its type
// - operands, compile-time constant arguments to the instruction
// - links, 0, 1, or 2 links to successor-instructions.
// note the single exception: the 'nuate instruction takes a Frame
// argument. This means that (as written), compiled code that
// includes call/cc won't be serializable, because the live control-stack
// frames aren't serializable. This only matters if we start thinking
// about serializing execution-in-process code and moving it to a
// different machine for resumption.
//...
//...
// A VM with 5 registers, 12 primitive instructions, and
// 3 basic data structures:
// - Frame captures a call-frame and maintains a dynamic control stack
// - Scope manages bindings of variables to values in lexically nested scopes
// - Closure binds parameters to actual args and executes code
struct Machine {
state: VMState
}
impl Machine
{
fn init(state: VMState) -> Machine { Machine { state:state } }
fn step(&mut self) -> Option<Obj> {
let (mut A,X,mut E,mut R,mut S) = (
self.state.A.clone(),
self.state.X.clone(),
self.state.E.clone(),
self.state.R.clone(),
self.state.S.clone()
);
let x = match *X {
// case HALT : return // and return A
HALT {..} => {
box HALT {unused:true}
},
// case REFER : I: REFER ; A = E[I.var]; X = I.next
REFER {var:ref var, k:ref k} => {
A = E.get(var).expect("yowza");
k.clone()
},
// case CONSTANT: I: CONSTANT; A = I.obj; X = I.next
CONSTANT {obj:ref obj, k:ref k} => {
A = obj.clone();
k.clone()
},
// case CLOSE : I: CLOSE ; A = Closure(I.vars, E, I.body); X = I.next
CLOSE {vars:ref vars, body:ref body, k:ref k} => {
let a = Closure { params:vars.clone(), env:E.clone(), body:body.clone() };
A = OClosure(a);
k.clone()
},
// case TEST : I: TEST ; X = (A == true)? I.thenc : I.elsec
TEST {kthen:ref kthen, kelse:ref kelse} => {
let k = //if A == true { kthen } else { kelse };
match A {
OBool(true) => { kthen },
//OBool(false) => { kelse },
_ => { kelse }
};
k.clone()
},
// case ASSIGN : I: ASSIGN ; E[I.var] = A; X = I.next
ASSIGN {var:ref var, k:ref k} => {
E.set(var, A.clone());
k.clone()
},
// case CONTI : I: CONTI ; A = capture_cc(S); X = I.next
CONTI {k:ref k} => {
let a = Machine::capture_cc(&S);
A = OClosure(a);
k.clone()
},
// case NUATE : I: NUATE ; A = E[I.var]; X = RETURN;
NUATE {s:ref s, var:ref var} => {
A = E.get(var).expect("yup");
box RETURN {unused:true}
},
// case FRAME : I: FRAME ; S = Frame(E, R, I.ret, S); R = [,]; X = I.next
FRAME {k:ref k, ret:ref ret} => {
let s = Frame {
ret: ret.clone(),
bindings: E.clone(),
valueRib: R.clone(),
caller:Some(box S.clone())
};
S = s;
R = vec!();
k.clone()
},
// case ARGUMENT: I: ARGUMENT; R.add(A); X = I.next
ARGUMENT {k:ref k} => {
R.push(A.clone());
k.clone()
},
// case APPLY : I: APPLY ; closure := (AxonClosure) A
// vals := R
// vars := closure.params
// E = closure.env.extend(vars, vals)
// R = [,]
// X = closure.body
APPLY {..} => {
let closure = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("yo! no clo")
};
let vals = R;
R = vec!();
let vars = closure.params.clone();
E = closure.env.extend(vars, vals);
closure.body
},
// case INVOKE : I: INVOKE ; obj := A
// // meth := obj.typeof.slot[I.method]
// args := (Obj?[]) R
// // A = meth.invoke(obj, args)
// R = [,]
// X = I.next
INVOKE {method:ref method, k:ref code} => {
let f = match A {
OClosure(ref clo) => { clo.clone() },
_ => fail!("no clo no mo")
};
let args = R;
R = vec!();
//TODO: A = (f)(args);
code.clone()
},
// case RETURN : I: RETURN ; X = S.ret; E = S.bindings; R = S.valueRib; S = S.caller
RETURN {..} => {
let x = S.ret;
E = S.bindings;
R = S.valueRib;
S = *S.caller.expect("DCM,ICU");
x
},
};
let retval = A.clone();
self.state = VMState {
A:A,
X:x, // advance to the instruction chosen by the match above
E:E,
R:R,
S:S
};
//notifyObservers
Some(retval)
}
fn done(&self) -> bool {
match *self.state.X {
HALT {..} => { true },
_ => { false }
}
}
fn run(&mut self) -> Option<Obj> {
loop {
let retval = self.step();
if self.done() {
return retval;
}
}
}
/// a continuation is a closure that in addition has access to the frame
/// in which it was created (where call/cc was called).
/// the body of a continuation closure, when executed, restores the
/// saved frame (which includes its calling frames) (pg. 50)
///
/// a continuation generates a closure that captures
/// the current control stack; the body of the generated
/// closure is an instruction that will restore the
/// captured stack.
fn capture_cc(s: &Frame) -> Closure {
let v = "__V__";
let body = box NUATE{ s:s.clone(), var:v.to_string() };
let env = Scope::new(None);
let vars = vec!(v.to_string());
Closure { params:vars, env:env, body:body }
}
} |
// control stack (ptr to top call frame; frames have link to prev frame) | random_line_split |
nvg.rs | //! NanoVG is small antialiased vector graphics rendering library with a lean
//! API modeled after the HTML5 Canvas API. It can be used to draw gauge
//! instruments in MSFS. See `Gauge::create_nanovg`.
use crate::sys;
type Result = std::result::Result<(), Box<dyn std::error::Error>>;
/// A NanoVG render context.
pub struct Context {
ctx: *mut sys::NVGcontext,
}
impl Context {
/// Create a NanoVG render context from an `FsContext`.
pub fn create(fs_ctx: sys::FsContext) -> Option<Self> {
let uninit = std::mem::MaybeUninit::<sys::NVGparams>::zeroed();
let mut params = unsafe { uninit.assume_init() };
params.userPtr = fs_ctx;
params.edgeAntiAlias = 1;
let ctx = unsafe { sys::nvgCreateInternal(&mut params) };
if ctx.is_null() {
None
} else {
Some(Self { ctx })
}
}
/// Draw a frame.
pub fn draw_frame<F: Fn(&Frame) -> Result>(&self, width: usize, height: usize, f: F) {
unsafe {
sys::nvgBeginFrame(self.ctx, width as f32, height as f32, 1.0);
}
let frame = Frame { ctx: self.ctx };
match f(&frame) {
Ok(()) => unsafe {
sys::nvgEndFrame(self.ctx);
},
Err(_) => unsafe {
sys::nvgCancelFrame(self.ctx);
},
}
}
/// NanoVG allows you to load .ttf files and use the font to render text.
///
/// The appearance of the text can be defined by setting the current text style
/// and by specifying the fill color. Common text and font settings such as
/// font size, letter spacing and text align are supported. Font blur allows you
/// to create simple text effects such as drop shadows.
///
/// At render time the font face can be set based on the font handles or name.
///
/// Font measure functions return values in local space, the calculations are
/// carried in the same resolution as the final rendering. This is done because
/// the text glyph positions are snapped to the nearest pixels sharp rendering.
///
/// The local space means that values are not rotated or scale as per the current
/// transformation. For example if you set font size to 12, which would mean that
/// line height is 16, then regardless of the current scaling and rotation, the
/// returned line height is always 16. Some measures may vary because of the scaling
/// since aforementioned pixel snapping.
///
/// While this may sound a little odd, the setup allows you to always render the
/// same way regardless of scaling.
///
/// Note: currently only solid color fill is supported for text.
pub fn create_font(
&self,
name: &str,
filename: &str,
) -> std::result::Result<Font, Box<dyn std::error::Error>> {
let name = std::ffi::CString::new(name).unwrap();
let filename = std::ffi::CString::new(filename).unwrap();
let handle = unsafe { sys::nvgCreateFont(self.ctx, name.as_ptr(), filename.as_ptr()) };
match handle {
-1 => Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"unable to load font",
))),
_ => Ok(Font { handle }),
}
}
/// NanoVG allows you to load jpg, png, psd, tga, pic and gif files to be used for rendering.
/// In addition you can upload your own image. The image loading is provided by stb_image.
pub fn create_image(
&self,
filename: &str,
) -> std::result::Result<Image, Box<dyn std::error::Error>> {
let filename = std::ffi::CString::new(filename).unwrap();
let handle = unsafe { sys::nvgCreateImage(self.ctx, filename.as_ptr(), 0) };
match handle {
-1 => Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"unable to load image",
))),
_ => Ok(Image {
ctx: self.ctx,
handle,
}),
}
}
}
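// A minimal usage sketch (the function name and coordinates are
// hypothetical; the calls are this module's own API): render one frame
// containing a circle that is both stroked and filled.
fn demo_draw(ctx: &Context) {
    ctx.draw_frame(640, 480, |frame| {
        let style = Style::default()
            .stroke(Color::from_rgb(255, 255, 255))
            .fill(Color::from_rgba(200, 40, 40, 255));
        frame.draw_path(&style, |path| {
            path.circle(320.0, 240.0, 100.0);
            Ok(())
        })
    });
}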
impl Drop for Context {
fn drop(&mut self) {
unsafe {
sys::nvgDeleteInternal(self.ctx);
}
}
}
/// Methods to draw on a frame. See `Context::draw_frame`.
pub struct Frame {
ctx: *mut sys::NVGcontext,
}
impl Frame {
/// Draw a path.
pub fn draw_path<F: Fn(&Path) -> Result>(&self, style: &Style, f: F) -> Result {
unsafe {
// sys::nvgSave(self.ctx);
// sys::nvgReset(self.ctx);
sys::nvgBeginPath(self.ctx);
}
if let Some(stroke) = &style.stroke {
match stroke {
PaintOrColor::Paint(p) => unsafe {
sys::nvgStrokePaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgStrokeColor(self.ctx, &c.0);
},
}
}
if let Some(fill) = &style.fill {
match fill {
PaintOrColor::Paint(p) => unsafe {
sys::nvgFillPaint(self.ctx, &p.0);
},
PaintOrColor::Color(c) => unsafe {
sys::nvgFillColor(self.ctx, &c.0);
},
}
}
let path = Path { ctx: self.ctx };
let r = f(&path);
if style.stroke.is_some() {
unsafe {
sys::nvgStroke(self.ctx);
}
}
if style.fill.is_some() {
unsafe {
sys::nvgFill(self.ctx);
}
}
/*
unsafe {
sys::nvgRestore(self.ctx);
}
*/
r
}
}
/// A path.
pub struct Path {
ctx: *mut sys::NVGcontext,
}
impl Path {
/// Starts new sub-path with specified point as first point.
pub fn move_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgMoveTo(self.ctx, x, y);
}
}
/// Adds line segment from the last point in the path to the specified point.
pub fn line_to(&self, x: f32, y: f32) {
unsafe {
sys::nvgLineTo(self.ctx, x, y);
}
}
/// Adds cubic bezier segment from last point in the path via two control points to the specified point.
pub fn bezier_to(&self, c1x: f32, c1y: f32, c2x: f32, c2y: f32, x: f32, y: f32) {
unsafe {
sys::nvgBezierTo(self.ctx, c1x, c1y, c2x, c2y, x, y);
}
}
/// Adds quadratic bezier segment from last point in the path via a control point to the
/// specified point.
pub fn quad_to(&self, cx: f32, cy: f32, x: f32, y: f32) {
unsafe {
sys::nvgQuadTo(self.ctx, cx, cy, x, y);
}
}
/// Adds an arc segment at the corner defined by the last path point, and two specified points.
pub fn arc_to(&self, x1: f32, y1: f32, x2: f32, y2: f32, radius: f32) {
unsafe {
sys::nvgArcTo(self.ctx, x1, y1, x2, y2, radius);
}
}
/// Closes current sub-path with a line segment.
pub fn close_path(&self) {
unsafe {
sys::nvgClosePath(self.ctx);
}
}
/// Creates a new circle arc shaped sub-path. The arc center is at (`cx`,`cy`), the arc radius
/// is `r`, and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
/// Angles are in radians.
pub fn arc(&self, cx: f32, cy: f32, r: f32, a0: f32, a1: f32, dir: Direction) {
unsafe {
sys::nvgArc(self.ctx, cx, cy, r, a0, a1, dir.to_sys() as _);
}
}
/// Creates a new oval arc shaped sub-path. The arc center is at (`cx`, `cy`), the arc radius
/// is (`rx`, `ry`), and the arc is drawn from angle `a0` to `a1`, and swept in direction `dir`.
#[allow(clippy::too_many_arguments)]
pub fn elliptical_arc(
&self,
cx: f32,
cy: f32,
rx: f32,
ry: f32,
a0: f32,
a1: f32,
dir: Direction,
) {
unsafe {
sys::nvgEllipticalArc(self.ctx, cx, cy, rx, ry, a0, a1, dir.to_sys() as _);
}
}
/// Creates new rectangle shaped sub-path.
pub fn rect(&self, x: f32, y: f32, w: f32, h: f32) {
unsafe {
sys::nvgRect(self.ctx, x, y, w, h);
}
}
/// Creates a new rounded rectangle sub-path with rounded corners
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect(&self, x: f32, y: f32, w: f32, h: f32, r: f32) {
unsafe {
sys::nvgRoundedRect(self.ctx, x, y, w, h, r);
}
}
/// Creates new rounded rectangle shaped sub-path with varying radii for each corner.
#[allow(clippy::too_many_arguments)]
#[allow(clippy::many_single_char_names)]
pub fn rounded_rect_varying(
&self,
x: f32,
y: f32,
w: f32,
h: f32,
rad_top_left: f32,
rad_top_right: f32,
rad_bottom_right: f32,
rad_bottom_left: f32,
) {
unsafe {
sys::nvgRoundedRectVarying(
self.ctx,
x,
y,
w,
h,
rad_top_left,
rad_top_right,
rad_bottom_right,
rad_bottom_left,
);
}
}
/// Creates a new ellipse shaped sub-path.
pub fn ellipse(&self, cx: f32, cy: f32, rx: f32, ry: f32) {
unsafe {
sys::nvgEllipse(self.ctx, cx, cy, rx, ry);
}
}
/// Creates a new circle shaped path.
pub fn circle(&self, cx: f32, cy: f32, r: f32) {
unsafe {
sys::nvgCircle(self.ctx, cx, cy, r);
}
}
// TODO: fill
}
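// Sketch of composing sub-paths (a hypothetical helper with made-up
// coordinates): a dial arc plus a needle, as a gauge might draw them.
fn demo_gauge_face(path: &Path, cx: f32, cy: f32, r: f32) {
    path.arc(
        cx,
        cy,
        r,
        0.75 * std::f32::consts::PI,
        2.25 * std::f32::consts::PI,
        Direction::Clockwise,
    );
    path.move_to(cx, cy);
    path.line_to(cx, cy - 0.9 * r); // needle pointing straight up
}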
/// Winding direction
#[derive(Debug, Clone, Copy)]
pub enum Direction {
/// Winding for holes.
Clockwise,
/// Winding for solid shapes.
CounterClockwise,
}
impl Direction {
fn | (self) -> sys::NVGwinding {
match self {
Direction::Clockwise => sys::NVGwinding_NVG_CW,
Direction::CounterClockwise => sys::NVGwinding_NVG_CCW,
}
}
}
#[derive(Debug)]
#[doc(hidden)]
pub enum PaintOrColor {
Paint(Paint),
Color(Color),
}
impl From<Paint> for PaintOrColor {
fn from(p: Paint) -> PaintOrColor {
PaintOrColor::Paint(p)
}
}
impl From<Color> for PaintOrColor {
fn from(c: Color) -> PaintOrColor {
PaintOrColor::Color(c)
}
}
/// The stroke and/or fill which will be applied to a path.
#[derive(Debug, Default)]
pub struct Style {
stroke: Option<PaintOrColor>,
fill: Option<PaintOrColor>,
}
impl Style {
/// Set the stroke of this style.
pub fn stroke<T: Into<PaintOrColor>>(mut self, stroke: T) -> Self {
self.stroke = Some(stroke.into());
self
}
/// Set the fill of this style.
pub fn fill<T: Into<PaintOrColor>>(mut self, fill: T) -> Self {
self.fill = Some(fill.into());
self
}
}
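// The builder composes; either field accepts a Color or a Paint. A
// stroke-only style, for example (the values are hypothetical):
fn demo_outline_style() -> Style {
    Style::default().stroke(Color::from_rgbf(0.0, 1.0, 0.0))
}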
/// Colors in NanoVG are stored as unsigned ints in ABGR format.
#[derive(Debug)]
pub struct Color(sys::NVGcolor);
impl Color {
/// Returns a color value from red, green, blue values. Alpha will be set to 255 (1.0).
pub fn from_rgb(r: u8, g: u8, b: u8) -> Self {
Self(unsafe { sys::nvgRGB(r, g, b) })
}
/// Returns a color value from red, green, blue values. Alpha will be set to 1.0f.
pub fn from_rgbf(r: f32, g: f32, b: f32) -> Self {
Self(unsafe { sys::nvgRGBf(r, g, b) })
}
/// Returns a color value from red, green, blue and alpha values.
pub fn from_rgba(r: u8, g: u8, b: u8, a: u8) -> Self {
Self(unsafe { sys::nvgRGBA(r, g, b, a) })
}
/// Returns a color value from red, green, blue and alpha values.
pub fn from_rgbaf(r: f32, g: f32, b: f32, a: f32) -> Self {
Self(unsafe { sys::nvgRGBAf(r, g, b, a) })
}
/// Returns color value specified by hue, saturation and lightness.
/// HSL values are all in range [0..1], alpha will be set to 255.
pub fn from_hsv(h: f32, s: f32, l: f32) -> Self {
Self(unsafe { sys::nvgHSL(h, s, l) })
}
/// Returns color value specified by hue, saturation and lightness.
/// HSL values are all in range [0..1]; the alpha channel is set from `a`.
pub fn from_hsva(h: f32, s: f32, l: f32, a: u8) -> Self {
Self(unsafe { sys::nvgHSLA(h, s, l, a) })
}
}
/// NanoVG supports four types of paints: linear gradient, box gradient, radial gradient and image pattern.
/// These can be used as paints for strokes and fills.
#[derive(Debug)]
pub struct Paint(sys::NVGpaint);
impl Paint {
/// Creates and returns an image pattern. Parameters (`x`, `y`) specify the left-top location of the image pattern,
/// (`w`, `h`) is the size of the image, `angle` is the rotation around the top-left corner, and `image` is the image
/// to render.
pub fn from_image(
image: &Image,
x: f32,
y: f32,
w: f32,
h: f32,
angle: f32,
alpha: f32,
) -> Paint {
Paint(unsafe { sys::nvgImagePattern(image.ctx, x, y, w, h, angle, image.handle, alpha) })
}
}
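// Sketch tying Image and Paint together (a hypothetical helper): fill a
// rectangle with an image pattern at the image's natural size.
fn demo_image_fill(frame: &Frame, image: &Image) -> Result {
    let (w, h) = image.size();
    let paint = Paint::from_image(image, 0.0, 0.0, w as f32, h as f32, 0.0, 1.0);
    frame.draw_path(&Style::default().fill(paint), |path| {
        path.rect(0.0, 0.0, w as f32, h as f32);
        Ok(())
    })
}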
/// A font handle.
pub struct Font {
handle: std::os::raw::c_int,
}
/// An image handle.
pub struct Image {
ctx: *mut sys::NVGcontext,
handle: std::os::raw::c_int,
}
impl Image {
/// Returns the dimensions of a created image.
pub fn size(&self) -> (usize, usize) {
let mut w = 0;
let mut h = 0;
unsafe {
sys::nvgImageSize(self.ctx, self.handle, &mut w, &mut h);
}
(w as usize, h as usize)
}
}
impl Drop for Image {
fn drop(&mut self) {
unsafe {
sys::nvgDeleteImage(self.ctx, self.handle);
}
}
}
| to_sys | identifier_name |