| file_name | prefix | suffix | middle | fim_type |
|---|---|---|---|---|
| large_string, lengths 4-69 | large_string, lengths 0-26.7k | large_string, lengths 0-24.8k | large_string, lengths 0-2.12k | large_string, 4 classes |
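Each row below is a single fill-in-the-middle (FIM) example: a source file split into a `prefix`, a held-out `middle`, and a `suffix`, with `fim_type` recording how the middle span was chosen (the four classes seen in this preview are `random_line_split`, `identifier_body`, `identifier_name`, and `conditional_block`). A minimal sketch of how such a row can be modeled and reassembled, with field names assumed from the schema above:

```rust
/// One FIM example, mirroring the columns above.
/// Field names are assumed from the schema; this is not a published API.
struct FimExample {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    /// One of: random_line_split, identifier_body, identifier_name, conditional_block.
    fim_type: String,
}

impl FimExample {
    /// Reconstruct the original source text by concatenating the three spans.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```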
task.rs
|
//! Helpers to program the task state segment.
//! See Intel 3a, Chapter 7, Section 7
use crate::Ring;
/// Although hardware task-switching is not supported in 64-bit mode,
/// a 64-bit task state segment (TSS) must exist.
///
/// The TSS holds information important to 64-bit mode and that is not
/// directly related to the task-switch mechanism. This information includes:
///
/// # RSPn
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
/// RSPx is loaded whenever an interrupt causes the CPU to change the RPL to x.
/// Note that on a syscall entry this field is not used to load a stack; setting the stack there
/// is the handler's responsibility (however, when using the `int` instruction in user space,
/// the stack is loaded from RSPn).
///
/// # ISTn
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
/// You can set an interrupt vector to use an IST entry in the Interrupt Descriptor
/// Table by giving it a number from 0 - 7. If 0 is selected, then the IST mechanism
/// is not used. If any other number is selected then when that interrupt vector is
/// called the CPU will load RSP from the corresponding IST entry. This is useful for
/// handling things like double faults, since you don't have to worry about switching
/// stacks; the CPU will do it for you.
///
/// # I/O map base address
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
///
/// The operating system must create at least one 64-bit TSS after activating IA-32e mode.
/// It must execute the LTR instruction (in 64-bit mode) to load the TR register with a
/// pointer to the 64-bit TSS responsible for both 64-bit-mode programs and
/// compatibility-mode programs ([load_tr](crate::task::load_tr)).
#[derive(Clone, Copy, Debug, Default)]
#[repr(C, packed)]
pub struct TaskStateSegment {
pub reserved: u32,
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
pub rsp: [u64; 3],
pub reserved2: u64,
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
|
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
pub iomap_base: u16,
}
impl TaskStateSegment {
/// Creates a new empty TSS.
pub const fn new() -> TaskStateSegment {
TaskStateSegment {
reserved: 0,
rsp: [0; 3],
reserved2: 0,
ist: [0; 7],
reserved3: 0,
reserved4: 0,
iomap_base: 0,
}
}
/// Sets the stack pointer (`stack_ptr`) to be used for when
/// an interrupt causes the CPU to change RPL to `pl`.
pub fn set_rsp(&mut self, pl: Ring, stack_ptr: u64) {
match pl {
Ring::Ring0 => self.rsp[0] = stack_ptr,
Ring::Ring1 => self.rsp[1] = stack_ptr,
Ring::Ring2 => self.rsp[2] = stack_ptr,
Ring::Ring3 => unreachable!("Can't set stack for PL3"),
}
}
/// Sets the stack pointer (`stack_ptr`) to be used when
/// an interrupt with a corresponding IST entry in the Interrupt
/// Descriptor table pointing to the given `index` is raised.
pub fn set_ist(&mut self, index: usize, stack_ptr: u64) {
match index {
0 => self.ist[0] = stack_ptr,
1 => self.ist[1] = stack_ptr,
2 => self.ist[2] = stack_ptr,
3 => self.ist[3] = stack_ptr,
4 => self.ist[4] = stack_ptr,
5 => self.ist[5] = stack_ptr,
6 => self.ist[6] = stack_ptr,
_ => unreachable!("Can't set IST for this index (out of bounds)."),
}
}
}
|
pub ist: [u64; 7],
pub reserved3: u64,
pub reserved4: u16,
|
random_line_split
|
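The doc comment in the row above explains how the IST gives an interrupt vector a known-good stack (for example on a double fault) and how RSP0-RSP2 are loaded on privilege changes. A short usage sketch, reusing the `TaskStateSegment` and `Ring` items from that row; the stack addresses are hypothetical placeholders:

```rust
// Sketch only: both stack-top addresses are made-up placeholders for stacks
// the kernel would have allocated elsewhere.
const RING0_STACK_TOP: u64 = 0xFFFF_8000_0000_F000;
const DOUBLE_FAULT_STACK_TOP: u64 = 0xFFFF_8000_0010_0000;

fn build_tss() -> TaskStateSegment {
    let mut tss = TaskStateSegment::new();
    // Loaded when an interrupt switches the CPU to ring 0.
    tss.set_rsp(Ring::Ring0, RING0_STACK_TOP);
    // ist[0] here corresponds to IST number 1 in an IDT entry (0 in the IDT
    // means "no IST"), so a double-fault gate selecting IST 1 would use this
    // stack (assuming that index-to-IST-number mapping).
    tss.set_ist(0, DOUBLE_FAULT_STACK_TOP);
    tss
}
```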
task.rs
|
//! Helpers to program the task state segment.
//! See Intel 3a, Chapter 7, Section 7
use crate::Ring;
/// Although hardware task-switching is not supported in 64-bit mode,
/// a 64-bit task state segment (TSS) must exist.
///
/// The TSS holds information important to 64-bit mode and that is not
/// directly related to the task-switch mechanism. This information includes:
///
/// # RSPn
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
/// RSPx is loaded whenever an interrupt causes the CPU to change the RPL to x.
/// Note that on a syscall entry this field is not used to load a stack; setting the stack there
/// is the handler's responsibility (however, when using the `int` instruction in user space,
/// the stack is loaded from RSPn).
///
/// # ISTn
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
/// You can set an interrupt vector to use an IST entry in the Interrupt Descriptor
/// Table by giving it a number from 0 - 7. If 0 is selected, then the IST mechanism
/// is not used. If any other number is selected then when that interrupt vector is
/// called the CPU will load RSP from the corresponding IST entry. This is useful for
/// handling things like double faults, since you don't have to worry about switching
/// stacks; the CPU will do it for you.
///
/// # I/O map base address
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
///
/// The operating system must create at least one 64-bit TSS after activating IA-32e mode.
/// It must execute the LTR instruction (in 64-bit mode) to load the TR register with a
/// pointer to the 64-bit TSS responsible for both 64-bit-mode programs and
/// compatibility-mode programs ([load_tr](crate::task::load_tr)).
#[derive(Clone, Copy, Debug, Default)]
#[repr(C, packed)]
pub struct TaskStateSegment {
pub reserved: u32,
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
pub rsp: [u64; 3],
pub reserved2: u64,
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
pub ist: [u64; 7],
pub reserved3: u64,
pub reserved4: u16,
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
pub iomap_base: u16,
}
impl TaskStateSegment {
/// Creates a new empty TSS.
pub const fn new() -> TaskStateSegment {
TaskStateSegment {
reserved: 0,
rsp: [0; 3],
reserved2: 0,
ist: [0; 7],
reserved3: 0,
reserved4: 0,
iomap_base: 0,
}
}
/// Sets the stack pointer (`stack_ptr`) to be used for when
/// an interrupt causes the CPU to change RPL to `pl`.
pub fn set_rsp(&mut self, pl: Ring, stack_ptr: u64)
|
/// Sets the stack pointer (`stack_ptr`) to be used when
/// an interrupt with a corresponding IST entry in the Interrupt
/// Descriptor table pointing to the given `index` is raised.
pub fn set_ist(&mut self, index: usize, stack_ptr: u64) {
match index {
0 => self.ist[0] = stack_ptr,
1 => self.ist[1] = stack_ptr,
2 => self.ist[2] = stack_ptr,
3 => self.ist[3] = stack_ptr,
4 => self.ist[4] = stack_ptr,
5 => self.ist[5] = stack_ptr,
6 => self.ist[6] = stack_ptr,
_ => unreachable!("Can't set IST for this index (out of bounds)."),
}
}
}
|
{
match pl {
Ring::Ring0 => self.rsp[0] = stack_ptr,
Ring::Ring1 => self.rsp[1] = stack_ptr,
Ring::Ring2 => self.rsp[2] = stack_ptr,
Ring::Ring3 => unreachable!("Can't set stack for PL3"),
}
}
|
identifier_body
|
task.rs
|
//! Helpers to program the task state segment.
//! See Intel 3a, Chapter 7, Section 7
use crate::Ring;
/// Although hardware task-switching is not supported in 64-bit mode,
/// a 64-bit task state segment (TSS) must exist.
///
/// The TSS holds information important to 64-bit mode and that is not
/// directly related to the task-switch mechanism. This information includes:
///
/// # RSPn
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
/// RSPx is loaded whenever an interrupt causes the CPU to change the RPL to x.
/// Note that on a syscall entry this field is not used to load a stack; setting the stack there
/// is the handler's responsibility (however, when using the `int` instruction in user space,
/// the stack is loaded from RSPn).
///
/// # ISTn
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
/// You can set an interrupt vector to use an IST entry in the Interrupt Descriptor
/// Table by giving it a number from 0 - 7. If 0 is selected, then the IST mechanism
/// is not used. If any other number is selected then when that interrupt vector is
/// called the CPU will load RSP from the corresponding IST entry. This is useful for
/// handling things like double faults, since you don't have to worry about switching
/// stacks; the CPU will do it for you.
///
/// # I/O map base address
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
///
/// The operating system must create at least one 64-bit TSS after activating IA-32e mode.
/// It must execute the LTR instruction (in 64-bit mode) to load the TR register with a
/// pointer to the 64-bit TSS responsible for both 64-bit-mode programs and
/// compatibility-mode programs ([load_tr](crate::task::load_tr)).
#[derive(Clone, Copy, Debug, Default)]
#[repr(C, packed)]
pub struct TaskStateSegment {
pub reserved: u32,
/// The full 64-bit canonical forms of the stack pointers (RSP) for privilege levels 0-2.
pub rsp: [u64; 3],
pub reserved2: u64,
/// The full 64-bit canonical forms of the interrupt stack table (IST) pointers.
pub ist: [u64; 7],
pub reserved3: u64,
pub reserved4: u16,
/// The 16-bit offset to the I/O permission bit map from the 64-bit TSS base.
pub iomap_base: u16,
}
impl TaskStateSegment {
/// Creates a new empty TSS.
pub const fn new() -> TaskStateSegment {
TaskStateSegment {
reserved: 0,
rsp: [0; 3],
reserved2: 0,
ist: [0; 7],
reserved3: 0,
reserved4: 0,
iomap_base: 0,
}
}
/// Sets the stack pointer (`stack_ptr`) to be used for when
/// an interrupt causes the CPU to change RPL to `pl`.
pub fn
|
(&mut self, pl: Ring, stack_ptr: u64) {
match pl {
Ring::Ring0 => self.rsp[0] = stack_ptr,
Ring::Ring1 => self.rsp[1] = stack_ptr,
Ring::Ring2 => self.rsp[2] = stack_ptr,
Ring::Ring3 => unreachable!("Can't set stack for PL3"),
}
}
/// Sets the stack pointer (`stack_ptr`) to be used when
/// an interrupt with a corresponding IST entry in the Interrupt
/// Descriptor table pointing to the given `index` is raised.
pub fn set_ist(&mut self, index: usize, stack_ptr: u64) {
match index {
0 => self.ist[0] = stack_ptr,
1 => self.ist[1] = stack_ptr,
2 => self.ist[2] = stack_ptr,
3 => self.ist[3] = stack_ptr,
4 => self.ist[4] = stack_ptr,
5 => self.ist[5] = stack_ptr,
6 => self.ist[6] = stack_ptr,
_ => unreachable!("Can't set IST for this index (out of bounds)."),
}
}
}
|
set_rsp
|
identifier_name
|
expr.rs
|
use crate::position::Pos;
use crate::tokenizer::{Kind, TokenStream};
/// Error of expression checking
///
/// See [check][].
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{}: tokenizer error: {}", _1, _0)]
Tokenizer(String, Pos),
#[error(
"{}: closing bracket mismatch, opened {:?} at {}, encountered {:?}",
closing_pos, opened, opened_pos, encountered)]
BracketMismatch {
opened: &'static str,
encountered: &'static str,
opened_pos: Pos,
closing_pos: Pos,
},
#[error("{}: extra closing bracket {:?}", _1, _0)]
ExtraBracket(&'static str, Pos),
#[error("{}: bracket {:?} has never been closed", _1, _0)]
MissingBracket(&'static str, Pos),
#[error("{}: token {:?} is not allowed in expression \
(try parenthesize the expression)", _1, _0)]
UnexpectedToken(String, Pos),
#[error("expression is empty")]
Empty,
}
fn bracket_str(tok: Kind) -> &'static str {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => "[",
CloseBracket => "]",
OpenBrace => "{",
CloseBrace => "}",
OpenParen => "(",
CloseParen => ")",
_ => unreachable!("token is not a bracket"),
}
}
fn matching_bracket(tok: Kind) -> Kind {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => CloseBracket,
OpenBrace => CloseBrace,
OpenParen => CloseParen,
_ => unreachable!("token is not a bracket"),
}
}
/// Minimal validation of expression
///
/// This is used for substitutions in migrations. This check merely ensures
/// that overall structure of the statement is not ruined. Mostly checks for
/// matching brackets and quotes closed.
///
/// More specifically, the current implementation checks that the expression is not
/// empty, checks for valid tokens and matching braces, and disallows comma `,` and
|
pub fn check(text: &str) -> Result<(), Error> {
use crate::tokenizer::Kind::*;
use Error::*;
let mut brackets = Vec::new();
let mut parser = &mut TokenStream::new(text);
let mut empty = true;
for token in &mut parser {
let (token, pos) = match token {
Ok(t) => (t.token, t.start),
Err(combine::easy::Error::Unexpected(s)) => {
return Err(Tokenizer(
s.to_string(), parser.current_pos()));
}
Err(e) => {
return Err(Tokenizer(
e.to_string(), parser.current_pos()));
}
};
empty = false;
match token.kind {
Comma | Semicolon if brackets.is_empty() => {
return Err(UnexpectedToken(token.value.to_string(), pos));
}
OpenParen | OpenBracket | OpenBrace => {
brackets.push((token.kind, pos));
}
CloseParen | CloseBracket | CloseBrace => match brackets.pop() {
Some((opened, opened_pos)) => {
if matching_bracket(opened) != token.kind {
return Err(BracketMismatch {
opened: bracket_str(opened),
opened_pos,
encountered: bracket_str(token.kind),
closing_pos: pos,
});
}
}
None => {
return Err(ExtraBracket(bracket_str(token.kind), pos));
}
},
_ => {}
}
};
if let Some((bracket, pos)) = brackets.pop() {
return Err(MissingBracket(bracket_str(bracket), pos));
}
if empty {
return Err(Empty);
}
Ok(())
}
|
/// semicolon `;` outside of brackets.
///
/// This is NOT a security measure.
|
random_line_split
|
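The doc comment for `check` in the row above describes a purely structural validation: non-empty input, matched brackets, and no `,` or `;` outside brackets. A hedged usage sketch, reusing `check` and `Error` from that row (exactly which tokens the underlying tokenizer accepts is an assumption here):

```rust
// Usage sketch for the `check` shown above; token-level details are assumed.
fn demo() {
    assert!(check("(a + b)").is_ok());                 // balanced brackets
    assert!(check("[1, 2, 3]").is_ok());               // comma inside brackets is allowed
    assert!(check("a, b").is_err());                   // top-level comma is rejected
    assert!(check("(a + b").is_err());                 // bracket never closed
    assert!(matches!(check(""), Err(Error::Empty)));   // empty expression
}
```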
expr.rs
|
use crate::position::Pos;
use crate::tokenizer::{Kind, TokenStream};
/// Error of expression checking
///
/// See [check][].
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{}: tokenizer error: {}", _1, _0)]
Tokenizer(String, Pos),
#[error(
"{}: closing bracket mismatch, opened {:?} at {}, encountered {:?}",
closing_pos, opened, opened_pos, encountered)]
BracketMismatch {
opened: &'static str,
encountered: &'static str,
opened_pos: Pos,
closing_pos: Pos,
},
#[error("{}: extra closing bracket {:?}", _1, _0)]
ExtraBracket(&'static str, Pos),
#[error("{}: bracket {:?} has never been closed", _1, _0)]
MissingBracket(&'static str, Pos),
#[error("{}: token {:?} is not allowed in expression \
(try parenthesize the expression)", _1, _0)]
UnexpectedToken(String, Pos),
#[error("expression is empty")]
Empty,
}
fn bracket_str(tok: Kind) -> &'static str {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => "[",
CloseBracket => "]",
OpenBrace => "{",
CloseBrace => "}",
OpenParen => "(",
CloseParen => ")",
_ => unreachable!("token is not a bracket"),
}
}
fn
|
(tok: Kind) -> Kind {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => CloseBracket,
OpenBrace => CloseBrace,
OpenParen => CloseParen,
_ => unreachable!("token is not a bracket"),
}
}
/// Minimal validation of expression
///
/// This is used for substitutions in migrations. This check merely ensures
/// that overall structure of the statement is not ruined. Mostly checks for
/// matching brackets and quotes closed.
///
/// More specifically, the current implementation checks that the expression is not
/// empty, checks for valid tokens and matching braces, and disallows comma `,` and
/// semicolon `;` outside of brackets.
///
/// This is NOT a security measure.
pub fn check(text: &str) -> Result<(), Error> {
use crate::tokenizer::Kind::*;
use Error::*;
let mut brackets = Vec::new();
let mut parser = &mut TokenStream::new(text);
let mut empty = true;
for token in &mut parser {
let (token, pos) = match token {
Ok(t) => (t.token, t.start),
Err(combine::easy::Error::Unexpected(s)) => {
return Err(Tokenizer(
s.to_string(), parser.current_pos()));
}
Err(e) => {
return Err(Tokenizer(
e.to_string(), parser.current_pos()));
}
};
empty = false;
match token.kind {
Comma | Semicolon if brackets.is_empty() => {
return Err(UnexpectedToken(token.value.to_string(), pos));
}
OpenParen | OpenBracket | OpenBrace => {
brackets.push((token.kind, pos));
}
CloseParen | CloseBracket | CloseBrace => match brackets.pop() {
Some((opened, opened_pos)) => {
if matching_bracket(opened) != token.kind {
return Err(BracketMismatch {
opened: bracket_str(opened),
opened_pos,
encountered: bracket_str(token.kind),
closing_pos: pos,
});
}
}
None => {
return Err(ExtraBracket(bracket_str(token.kind), pos));
}
},
_ => {}
}
};
if let Some((bracket, pos)) = brackets.pop() {
return Err(MissingBracket(bracket_str(bracket), pos));
}
if empty {
return Err(Empty);
}
Ok(())
}
|
matching_bracket
|
identifier_name
|
expr.rs
|
use crate::position::Pos;
use crate::tokenizer::{Kind, TokenStream};
/// Error of expression checking
///
/// See [check][].
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{}: tokenizer error: {}", _1, _0)]
Tokenizer(String, Pos),
#[error(
"{}: closing bracket mismatch, opened {:?} at {}, encountered {:?}",
closing_pos, opened, opened_pos, encountered)]
BracketMismatch {
opened: &'static str,
encountered: &'static str,
opened_pos: Pos,
closing_pos: Pos,
},
#[error("{}: extra closing bracket {:?}", _1, _0)]
ExtraBracket(&'static str, Pos),
#[error("{}: bracket {:?} has never been closed", _1, _0)]
MissingBracket(&'static str, Pos),
#[error("{}: token {:?} is not allowed in expression \
(try parenthesize the expression)", _1, _0)]
UnexpectedToken(String, Pos),
#[error("expression is empty")]
Empty,
}
fn bracket_str(tok: Kind) -> &'static str {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => "[",
CloseBracket => "]",
OpenBrace => "{",
CloseBrace => "}",
OpenParen => "(",
CloseParen => ")",
_ => unreachable!("token is not a bracket"),
}
}
fn matching_bracket(tok: Kind) -> Kind {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => CloseBracket,
OpenBrace => CloseBrace,
OpenParen => CloseParen,
_ => unreachable!("token is not a bracket"),
}
}
/// Minimal validation of expression
///
/// This is used for substitutions in migrations. This check merely ensures
/// that overall structure of the statement is not ruined. Mostly checks for
/// matching brackets and quotes closed.
///
/// More specifically, the current implementation checks that the expression is not
/// empty, checks for valid tokens and matching braces, and disallows comma `,` and
/// semicolon `;` outside of brackets.
///
/// This is NOT a security measure.
pub fn check(text: &str) -> Result<(), Error> {
use crate::tokenizer::Kind::*;
use Error::*;
let mut brackets = Vec::new();
let mut parser = &mut TokenStream::new(text);
let mut empty = true;
for token in &mut parser {
let (token, pos) = match token {
Ok(t) => (t.token, t.start),
Err(combine::easy::Error::Unexpected(s)) => {
return Err(Tokenizer(
s.to_string(), parser.current_pos()));
}
Err(e) => {
return Err(Tokenizer(
e.to_string(), parser.current_pos()));
}
};
empty = false;
match token.kind {
Comma | Semicolon if brackets.is_empty() => {
return Err(UnexpectedToken(token.value.to_string(), pos));
}
OpenParen | OpenBracket | OpenBrace => {
brackets.push((token.kind, pos));
}
CloseParen | CloseBracket | CloseBrace => match brackets.pop() {
Some((opened, opened_pos)) => {
if matching_bracket(opened) != token.kind {
return Err(BracketMismatch {
opened: bracket_str(opened),
opened_pos,
encountered: bracket_str(token.kind),
closing_pos: pos,
});
}
}
None => {
return Err(ExtraBracket(bracket_str(token.kind), pos));
}
},
_ => {}
}
};
if let Some((bracket, pos)) = brackets.pop() {
return Err(MissingBracket(bracket_str(bracket), pos));
}
if empty
|
Ok(())
}
|
{
return Err(Empty);
}
|
conditional_block
|
expr.rs
|
use crate::position::Pos;
use crate::tokenizer::{Kind, TokenStream};
/// Error of expression checking
///
/// See [check][].
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{}: tokenizer error: {}", _1, _0)]
Tokenizer(String, Pos),
#[error(
"{}: closing bracket mismatch, opened {:?} at {}, encountered {:?}",
closing_pos, opened, opened_pos, encountered)]
BracketMismatch {
opened: &'static str,
encountered: &'static str,
opened_pos: Pos,
closing_pos: Pos,
},
#[error("{}: extra closing bracket {:?}", _1, _0)]
ExtraBracket(&'static str, Pos),
#[error("{}: bracket {:?} has never been closed", _1, _0)]
MissingBracket(&'static str, Pos),
#[error("{}: token {:?} is not allowed in expression \
(try parenthesize the expression)", _1, _0)]
UnexpectedToken(String, Pos),
#[error("expression is empty")]
Empty,
}
fn bracket_str(tok: Kind) -> &'static str {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => "[",
CloseBracket => "]",
OpenBrace => "{",
CloseBrace => "}",
OpenParen => "(",
CloseParen => ")",
_ => unreachable!("token is not a bracket"),
}
}
fn matching_bracket(tok: Kind) -> Kind {
use crate::tokenizer::Kind::*;
match tok {
OpenBracket => CloseBracket,
OpenBrace => CloseBrace,
OpenParen => CloseParen,
_ => unreachable!("token is not a bracket"),
}
}
/// Minimal validation of expression
///
/// This is used for substitutions in migrations. This check merely ensures
/// that overall structure of the statement is not ruined. Mostly checks for
/// matching brackets and quotes closed.
///
/// More specifically, the current implementation checks that the expression is not
/// empty, checks for valid tokens and matching braces, and disallows comma `,` and
/// semicolon `;` outside of brackets.
///
/// This is NOT a security measure.
pub fn check(text: &str) -> Result<(), Error>
|
match token.kind {
Comma | Semicolon if brackets.is_empty() => {
return Err(UnexpectedToken(token.value.to_string(), pos));
}
OpenParen | OpenBracket | OpenBrace => {
brackets.push((token.kind, pos));
}
CloseParen | CloseBracket | CloseBrace => match brackets.pop() {
Some((opened, opened_pos)) => {
if matching_bracket(opened) != token.kind {
return Err(BracketMismatch {
opened: bracket_str(opened),
opened_pos,
encountered: bracket_str(token.kind),
closing_pos: pos,
});
}
}
None => {
return Err(ExtraBracket(bracket_str(token.kind), pos));
}
},
_ => {}
}
};
if let Some((bracket, pos)) = brackets.pop() {
return Err(MissingBracket(bracket_str(bracket), pos));
}
if empty {
return Err(Empty);
}
Ok(())
}
|
{
use crate::tokenizer::Kind::*;
use Error::*;
let mut brackets = Vec::new();
let mut parser = &mut TokenStream::new(text);
let mut empty = true;
for token in &mut parser {
let (token, pos) = match token {
Ok(t) => (t.token, t.start),
Err(combine::easy::Error::Unexpected(s)) => {
return Err(Tokenizer(
s.to_string(), parser.current_pos()));
}
Err(e) => {
return Err(Tokenizer(
e.to_string(), parser.current_pos()));
}
};
empty = false;
|
identifier_body
|
archive.rs
|
//! Creation of ar archives like for the lib and staticlib crate type
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_session::Session;
use object::read::archive::ArchiveFile;
use object::{Object, ObjectSymbol, ReadCache, SymbolKind};
#[derive(Debug)]
enum ArchiveEntry {
FromArchive { archive_index: usize, file_range: (u64, u64) },
File(PathBuf),
}
pub(crate) struct ArArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
use_gnu_style_archive: bool,
no_builtin_ranlib: bool,
src_archives: Vec<File>,
// Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
// the end of an archive for linkers to not get confused.
entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path, input: Option<&Path>) -> Self {
let (src_archives, entries) = if let Some(input) = input {
let read_cache = ReadCache::new(File::open(input).unwrap());
let archive = ArchiveFile::parse(&read_cache).unwrap();
let mut entries = Vec::new();
for entry in archive.members() {
let entry = entry.unwrap();
entries.push((
entry.name().to_vec(),
ArchiveEntry::FromArchive { archive_index: 0, file_range: entry.file_range() },
));
}
(vec![read_cache.into_inner()], entries)
} else {
(vec![], Vec::new())
};
ArArchiveBuilder {
sess,
dst: output.to_path_buf(),
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives,
entries,
}
}
fn src_files(&mut self) -> Vec<String> {
self.entries.iter().map(|(name, _)| String::from_utf8(name.clone()).unwrap()).collect()
}
fn remove_file(&mut self, name: &str)
|
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
ArchiveEntry::File(file.to_owned()),
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
let archive = ArchiveFile::parse(&read_cache).unwrap();
let archive_index = self.src_archives.len();
for entry in archive.members() {
let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let file_name = String::from_utf8(entry.name().to_vec())
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if !skip(&file_name) {
self.entries.push((
file_name.into_bytes(),
ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
));
}
}
self.src_archives.push(read_cache.into_inner());
Ok(())
}
fn update_symbols(&mut self) {}
fn build(mut self) {
enum BuilderKind {
Bsd(ar::Builder<File>),
Gnu(ar::GnuBuilder<File>),
}
let sess = self.sess;
let mut symbol_table = BTreeMap::new();
let mut entries = Vec::new();
for (entry_name, entry) in self.entries {
// FIXME only read the symbol table of the object files to avoid having to keep all
// object files in memory at once, or read them twice.
let data = match entry {
ArchiveEntry::FromArchive { archive_index, file_range } => {
// FIXME read symbols from symtab
let src_read_cache = &mut self.src_archives[archive_index];
src_read_cache.seek(io::SeekFrom::Start(file_range.0)).unwrap();
let mut data = std::vec::from_elem(0, usize::try_from(file_range.1).unwrap());
src_read_cache.read_exact(&mut data).unwrap();
data
}
ArchiveEntry::File(file) => std::fs::read(file).unwrap_or_else(|err| {
sess.fatal(&format!(
"error while reading object file during archive building: {}",
err
));
}),
};
if !self.no_builtin_ranlib {
match object::File::parse(&*data) {
Ok(object) => {
symbol_table.insert(
entry_name.to_vec(),
object
.symbols()
.filter_map(|symbol| {
if symbol.is_undefined()
|| symbol.is_local()
|| symbol.kind() != SymbolKind::Data
&& symbol.kind() != SymbolKind::Text
&& symbol.kind() != SymbolKind::Tls
{
None
} else {
symbol.name().map(|name| name.as_bytes().to_vec()).ok()
}
})
.collect::<Vec<_>>(),
);
}
Err(err) => {
let err = err.to_string();
if err == "Unknown file magic" {
// Not an object file; skip it.
} else {
sess.fatal(&format!(
"error parsing `{}` during archive creation: {}",
String::from_utf8_lossy(&entry_name),
err
));
}
}
}
}
entries.push((entry_name, data));
}
let mut builder = if self.use_gnu_style_archive {
BuilderKind::Gnu(
ar::GnuBuilder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
entries.iter().map(|(name, _)| name.clone()).collect(),
ar::GnuSymbolTableFormat::Size32,
symbol_table,
)
.unwrap(),
)
} else {
BuilderKind::Bsd(
ar::Builder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
symbol_table,
)
.unwrap(),
)
};
// Add all files
for (entry_name, data) in entries.into_iter() {
let header = ar::Header::new(entry_name, data.len() as u64);
match builder {
BuilderKind::Bsd(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
BuilderKind::Gnu(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
}
}
// Finalize archive
std::mem::drop(builder);
if self.no_builtin_ranlib {
let ranlib = crate::toolchain::get_toolchain_binary(self.sess, "ranlib");
// Run ranlib to be able to link the archive
let status = std::process::Command::new(ranlib)
.arg(self.dst)
.status()
.expect("Couldn't run ranlib");
if !status.success() {
self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
}
}
}
fn inject_dll_import_lib(
&mut self,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &rustc_data_structures::temp_dir::MaybeTempDir,
) {
bug!("injecting dll imports is not supported");
}
}
|
{
let index = self
.entries
.iter()
.position(|(entry_name, _)| entry_name == name.as_bytes())
.expect("Tried to remove file not existing in src archive");
self.entries.remove(index);
}
|
identifier_body
|
archive.rs
|
//! Creation of ar archives like for the lib and staticlib crate type
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_session::Session;
use object::read::archive::ArchiveFile;
use object::{Object, ObjectSymbol, ReadCache, SymbolKind};
#[derive(Debug)]
enum ArchiveEntry {
FromArchive { archive_index: usize, file_range: (u64, u64) },
File(PathBuf),
}
pub(crate) struct ArArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
use_gnu_style_archive: bool,
no_builtin_ranlib: bool,
src_archives: Vec<File>,
// Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
// the end of an archive for linkers to not get confused.
entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path, input: Option<&Path>) -> Self {
let (src_archives, entries) = if let Some(input) = input {
let read_cache = ReadCache::new(File::open(input).unwrap());
let archive = ArchiveFile::parse(&read_cache).unwrap();
let mut entries = Vec::new();
for entry in archive.members() {
let entry = entry.unwrap();
entries.push((
entry.name().to_vec(),
ArchiveEntry::FromArchive { archive_index: 0, file_range: entry.file_range() },
));
}
(vec![read_cache.into_inner()], entries)
} else {
(vec![], Vec::new())
};
ArArchiveBuilder {
sess,
dst: output.to_path_buf(),
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives,
entries,
}
}
fn src_files(&mut self) -> Vec<String> {
self.entries.iter().map(|(name, _)| String::from_utf8(name.clone()).unwrap()).collect()
}
fn remove_file(&mut self, name: &str) {
let index = self
.entries
.iter()
.position(|(entry_name, _)| entry_name == name.as_bytes())
.expect("Tried to remove file not existing in src archive");
self.entries.remove(index);
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
ArchiveEntry::File(file.to_owned()),
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
let archive = ArchiveFile::parse(&read_cache).unwrap();
let archive_index = self.src_archives.len();
for entry in archive.members() {
let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let file_name = String::from_utf8(entry.name().to_vec())
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if !skip(&file_name) {
self.entries.push((
file_name.into_bytes(),
ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
));
}
}
self.src_archives.push(read_cache.into_inner());
Ok(())
}
fn update_symbols(&mut self) {}
fn build(mut self) {
enum BuilderKind {
Bsd(ar::Builder<File>),
Gnu(ar::GnuBuilder<File>),
}
let sess = self.sess;
let mut symbol_table = BTreeMap::new();
let mut entries = Vec::new();
for (entry_name, entry) in self.entries {
// FIXME only read the symbol table of the object files to avoid having to keep all
// object files in memory at once, or read them twice.
let data = match entry {
ArchiveEntry::FromArchive { archive_index, file_range } => {
// FIXME read symbols from symtab
let src_read_cache = &mut self.src_archives[archive_index];
src_read_cache.seek(io::SeekFrom::Start(file_range.0)).unwrap();
let mut data = std::vec::from_elem(0, usize::try_from(file_range.1).unwrap());
src_read_cache.read_exact(&mut data).unwrap();
data
}
ArchiveEntry::File(file) => std::fs::read(file).unwrap_or_else(|err| {
sess.fatal(&format!(
"error while reading object file during archive building: {}",
err
));
}),
};
if !self.no_builtin_ranlib {
match object::File::parse(&*data) {
Ok(object) => {
symbol_table.insert(
entry_name.to_vec(),
object
.symbols()
.filter_map(|symbol| {
if symbol.is_undefined()
|| symbol.is_local()
|| symbol.kind() != SymbolKind::Data
&& symbol.kind() != SymbolKind::Text
&& symbol.kind() != SymbolKind::Tls
{
None
} else {
symbol.name().map(|name| name.as_bytes().to_vec()).ok()
}
})
.collect::<Vec<_>>(),
);
}
Err(err) => {
let err = err.to_string();
if err == "Unknown file magic" {
// Not an object file; skip it.
} else
|
}
}
}
entries.push((entry_name, data));
}
let mut builder = if self.use_gnu_style_archive {
BuilderKind::Gnu(
ar::GnuBuilder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
entries.iter().map(|(name, _)| name.clone()).collect(),
ar::GnuSymbolTableFormat::Size32,
symbol_table,
)
.unwrap(),
)
} else {
BuilderKind::Bsd(
ar::Builder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
symbol_table,
)
.unwrap(),
)
};
// Add all files
for (entry_name, data) in entries.into_iter() {
let header = ar::Header::new(entry_name, data.len() as u64);
match builder {
BuilderKind::Bsd(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
BuilderKind::Gnu(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
}
}
// Finalize archive
std::mem::drop(builder);
if self.no_builtin_ranlib {
let ranlib = crate::toolchain::get_toolchain_binary(self.sess, "ranlib");
// Run ranlib to be able to link the archive
let status = std::process::Command::new(ranlib)
.arg(self.dst)
.status()
.expect("Couldn't run ranlib");
if !status.success() {
self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
}
}
}
fn inject_dll_import_lib(
&mut self,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &rustc_data_structures::temp_dir::MaybeTempDir,
) {
bug!("injecting dll imports is not supported");
}
}
|
{
sess.fatal(&format!(
"error parsing `{}` during archive creation: {}",
String::from_utf8_lossy(&entry_name),
err
));
}
|
conditional_block
|
archive.rs
|
//! Creation of ar archives like for the lib and staticlib crate type
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_session::Session;
use object::read::archive::ArchiveFile;
use object::{Object, ObjectSymbol, ReadCache, SymbolKind};
#[derive(Debug)]
enum ArchiveEntry {
FromArchive { archive_index: usize, file_range: (u64, u64) },
File(PathBuf),
}
pub(crate) struct ArArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
use_gnu_style_archive: bool,
no_builtin_ranlib: bool,
src_archives: Vec<File>,
// Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
// the end of an archive for linkers to not get confused.
entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path, input: Option<&Path>) -> Self {
let (src_archives, entries) = if let Some(input) = input {
let read_cache = ReadCache::new(File::open(input).unwrap());
let archive = ArchiveFile::parse(&read_cache).unwrap();
let mut entries = Vec::new();
for entry in archive.members() {
let entry = entry.unwrap();
entries.push((
entry.name().to_vec(),
|
));
}
(vec![read_cache.into_inner()], entries)
} else {
(vec![], Vec::new())
};
ArArchiveBuilder {
sess,
dst: output.to_path_buf(),
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives,
entries,
}
}
fn src_files(&mut self) -> Vec<String> {
self.entries.iter().map(|(name, _)| String::from_utf8(name.clone()).unwrap()).collect()
}
fn remove_file(&mut self, name: &str) {
let index = self
.entries
.iter()
.position(|(entry_name, _)| entry_name == name.as_bytes())
.expect("Tried to remove file not existing in src archive");
self.entries.remove(index);
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
ArchiveEntry::File(file.to_owned()),
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
let archive = ArchiveFile::parse(&read_cache).unwrap();
let archive_index = self.src_archives.len();
for entry in archive.members() {
let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let file_name = String::from_utf8(entry.name().to_vec())
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if !skip(&file_name) {
self.entries.push((
file_name.into_bytes(),
ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
));
}
}
self.src_archives.push(read_cache.into_inner());
Ok(())
}
fn update_symbols(&mut self) {}
fn build(mut self) {
enum BuilderKind {
Bsd(ar::Builder<File>),
Gnu(ar::GnuBuilder<File>),
}
let sess = self.sess;
let mut symbol_table = BTreeMap::new();
let mut entries = Vec::new();
for (entry_name, entry) in self.entries {
// FIXME only read the symbol table of the object files to avoid having to keep all
// object files in memory at once, or read them twice.
let data = match entry {
ArchiveEntry::FromArchive { archive_index, file_range } => {
// FIXME read symbols from symtab
let src_read_cache = &mut self.src_archives[archive_index];
src_read_cache.seek(io::SeekFrom::Start(file_range.0)).unwrap();
let mut data = std::vec::from_elem(0, usize::try_from(file_range.1).unwrap());
src_read_cache.read_exact(&mut data).unwrap();
data
}
ArchiveEntry::File(file) => std::fs::read(file).unwrap_or_else(|err| {
sess.fatal(&format!(
"error while reading object file during archive building: {}",
err
));
}),
};
if !self.no_builtin_ranlib {
match object::File::parse(&*data) {
Ok(object) => {
symbol_table.insert(
entry_name.to_vec(),
object
.symbols()
.filter_map(|symbol| {
if symbol.is_undefined()
|| symbol.is_local()
|| symbol.kind() != SymbolKind::Data
&& symbol.kind() != SymbolKind::Text
&& symbol.kind() != SymbolKind::Tls
{
None
} else {
symbol.name().map(|name| name.as_bytes().to_vec()).ok()
}
})
.collect::<Vec<_>>(),
);
}
Err(err) => {
let err = err.to_string();
if err == "Unknown file magic" {
// Not an object file; skip it.
} else {
sess.fatal(&format!(
"error parsing `{}` during archive creation: {}",
String::from_utf8_lossy(&entry_name),
err
));
}
}
}
}
entries.push((entry_name, data));
}
let mut builder = if self.use_gnu_style_archive {
BuilderKind::Gnu(
ar::GnuBuilder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
entries.iter().map(|(name, _)| name.clone()).collect(),
ar::GnuSymbolTableFormat::Size32,
symbol_table,
)
.unwrap(),
)
} else {
BuilderKind::Bsd(
ar::Builder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
symbol_table,
)
.unwrap(),
)
};
// Add all files
for (entry_name, data) in entries.into_iter() {
let header = ar::Header::new(entry_name, data.len() as u64);
match builder {
BuilderKind::Bsd(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
BuilderKind::Gnu(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
}
}
// Finalize archive
std::mem::drop(builder);
if self.no_builtin_ranlib {
let ranlib = crate::toolchain::get_toolchain_binary(self.sess, "ranlib");
// Run ranlib to be able to link the archive
let status = std::process::Command::new(ranlib)
.arg(self.dst)
.status()
.expect("Couldn't run ranlib");
if !status.success() {
self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
}
}
}
fn inject_dll_import_lib(
&mut self,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &rustc_data_structures::temp_dir::MaybeTempDir,
) {
bug!("injecting dll imports is not supported");
}
}
|
ArchiveEntry::FromArchive { archive_index: 0, file_range: entry.file_range() },
|
random_line_split
|
archive.rs
|
//! Creation of ar archives like for the lib and staticlib crate type
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_session::Session;
use object::read::archive::ArchiveFile;
use object::{Object, ObjectSymbol, ReadCache, SymbolKind};
#[derive(Debug)]
enum ArchiveEntry {
FromArchive { archive_index: usize, file_range: (u64, u64) },
File(PathBuf),
}
pub(crate) struct ArArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
use_gnu_style_archive: bool,
no_builtin_ranlib: bool,
src_archives: Vec<File>,
// Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
// the end of an archive for linkers to not get confused.
entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path, input: Option<&Path>) -> Self {
let (src_archives, entries) = if let Some(input) = input {
let read_cache = ReadCache::new(File::open(input).unwrap());
let archive = ArchiveFile::parse(&read_cache).unwrap();
let mut entries = Vec::new();
for entry in archive.members() {
let entry = entry.unwrap();
entries.push((
entry.name().to_vec(),
ArchiveEntry::FromArchive { archive_index: 0, file_range: entry.file_range() },
));
}
(vec![read_cache.into_inner()], entries)
} else {
(vec![], Vec::new())
};
ArArchiveBuilder {
sess,
dst: output.to_path_buf(),
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives,
entries,
}
}
fn src_files(&mut self) -> Vec<String> {
self.entries.iter().map(|(name, _)| String::from_utf8(name.clone()).unwrap()).collect()
}
fn remove_file(&mut self, name: &str) {
let index = self
.entries
.iter()
.position(|(entry_name, _)| entry_name == name.as_bytes())
.expect("Tried to remove file not existing in src archive");
self.entries.remove(index);
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
ArchiveEntry::File(file.to_owned()),
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
let archive = ArchiveFile::parse(&read_cache).unwrap();
let archive_index = self.src_archives.len();
for entry in archive.members() {
let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
let file_name = String::from_utf8(entry.name().to_vec())
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if !skip(&file_name) {
self.entries.push((
file_name.into_bytes(),
ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
));
}
}
self.src_archives.push(read_cache.into_inner());
Ok(())
}
fn update_symbols(&mut self) {}
fn build(mut self) {
enum BuilderKind {
Bsd(ar::Builder<File>),
Gnu(ar::GnuBuilder<File>),
}
let sess = self.sess;
let mut symbol_table = BTreeMap::new();
let mut entries = Vec::new();
for (entry_name, entry) in self.entries {
// FIXME only read the symbol table of the object files to avoid having to keep all
// object files in memory at once, or read them twice.
let data = match entry {
ArchiveEntry::FromArchive { archive_index, file_range } => {
// FIXME read symbols from symtab
let src_read_cache = &mut self.src_archives[archive_index];
src_read_cache.seek(io::SeekFrom::Start(file_range.0)).unwrap();
let mut data = std::vec::from_elem(0, usize::try_from(file_range.1).unwrap());
src_read_cache.read_exact(&mut data).unwrap();
data
}
ArchiveEntry::File(file) => std::fs::read(file).unwrap_or_else(|err| {
sess.fatal(&format!(
"error while reading object file during archive building: {}",
err
));
}),
};
if !self.no_builtin_ranlib {
match object::File::parse(&*data) {
Ok(object) => {
symbol_table.insert(
entry_name.to_vec(),
object
.symbols()
.filter_map(|symbol| {
if symbol.is_undefined()
|| symbol.is_local()
|| symbol.kind() != SymbolKind::Data
&& symbol.kind() != SymbolKind::Text
&& symbol.kind() != SymbolKind::Tls
{
None
} else {
symbol.name().map(|name| name.as_bytes().to_vec()).ok()
}
})
.collect::<Vec<_>>(),
);
}
Err(err) => {
let err = err.to_string();
if err == "Unknown file magic" {
// Not an object file; skip it.
} else {
sess.fatal(&format!(
"error parsing `{}` during archive creation: {}",
String::from_utf8_lossy(&entry_name),
err
));
}
}
}
}
entries.push((entry_name, data));
}
let mut builder = if self.use_gnu_style_archive {
BuilderKind::Gnu(
ar::GnuBuilder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
entries.iter().map(|(name, _)| name.clone()).collect(),
ar::GnuSymbolTableFormat::Size32,
symbol_table,
)
.unwrap(),
)
} else {
BuilderKind::Bsd(
ar::Builder::new(
File::create(&self.dst).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
));
}),
symbol_table,
)
.unwrap(),
)
};
// Add all files
for (entry_name, data) in entries.into_iter() {
let header = ar::Header::new(entry_name, data.len() as u64);
match builder {
BuilderKind::Bsd(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
BuilderKind::Gnu(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
}
}
// Finalize archive
std::mem::drop(builder);
if self.no_builtin_ranlib {
let ranlib = crate::toolchain::get_toolchain_binary(self.sess, "ranlib");
// Run ranlib to be able to link the archive
let status = std::process::Command::new(ranlib)
.arg(self.dst)
.status()
.expect("Couldn't run ranlib");
if !status.success() {
self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
}
}
}
fn
|
(
&mut self,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &rustc_data_structures::temp_dir::MaybeTempDir,
) {
bug!("injecting dll imports is not supported");
}
}
|
inject_dll_import_lib
|
identifier_name
|
borrowck-newtype-issue-2573.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct foo {bar: baz}
struct baz_ {baz: int}
type baz = @mut baz_;
trait frob {
fn frob(&self);
}
impl frob for foo {
fn frob(&self)
|
}
// Override default mode so that we are passing by value
fn really_impure(++bar: baz) {
bar.baz = 3;
}
pub fn main() {}
|
{
really_impure(self.bar);
}
|
identifier_body
|
borrowck-newtype-issue-2573.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct foo {bar: baz}
struct baz_ {baz: int}
type baz = @mut baz_;
trait frob {
|
impl frob for foo {
fn frob(&self) {
really_impure(self.bar);
}
}
// Override default mode so that we are passing by value
fn really_impure(++bar: baz) {
bar.baz = 3;
}
pub fn main() {}
|
fn frob(&self);
}
|
random_line_split
|
borrowck-newtype-issue-2573.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct foo {bar: baz}
struct baz_ {baz: int}
type baz = @mut baz_;
trait frob {
fn frob(&self);
}
impl frob for foo {
fn
|
(&self) {
really_impure(self.bar);
}
}
// Override default mode so that we are passing by value
fn really_impure(++bar: baz) {
bar.baz = 3;
}
pub fn main() {}
|
frob
|
identifier_name
|
func.rs
|
//! Module for working with `Function`s
//!
//! This module contains a type alias for `Rc<Fn(f64) -> f64>`,
//! which is used in many other modules, and functions for
//! working with the alias.
pub use std::rc::Rc;
/// Type alias used to represent functions.
///
/// A `Function` is a `Fn` that takes a single `f64`,
/// does something with it, and returns another `f64`.
///
/// Functions are stored in an `Rc` so they can be `cloned()`
/// and subsequently consumed in other functions.
pub type Function = Rc<dyn Fn(f64) -> f64>;
/// Macro for creating a `Function`.
///
|
/// ```
/// # #[macro_use]
/// # extern crate reikna;
/// # fn main() {
/// use reikna::func::*;
/// let f: Function = func!(|x| x * x);
/// assert_eq!(f(5.0), 25.0);
/// # }
/// ```
#[macro_export]
macro_rules! func {
($e:expr) => (Rc::new($e) as Function);
}
|
/// More idiomatic than calling `Rc::new()`.
///
/// # Examples
///
|
random_line_split
|
expr-alt-struct.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// -*- rust -*-
// Tests for match as expressions resulting in struct types
struct R { i: int }
fn
|
() {
let rs = match true { true => R {i: 100}, _ => fail!() };
assert!((rs.i == 100));
}
enum mood { happy, sad, }
impl cmp::Eq for mood {
fn eq(&self, other: &mood) -> bool {
((*self) as uint) == ((*other) as uint)
}
fn ne(&self, other: &mood) -> bool { !(*self).eq(other) }
}
fn test_tag() {
let rs = match true { true => { happy } false => { sad } };
assert!((rs == happy));
}
pub fn main() { test_rec(); test_tag(); }
|
test_rec
|
identifier_name
|
expr-alt-struct.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// -*- rust -*-
// Tests for match as expressions resulting in struct types
struct R { i: int }
fn test_rec() {
let rs = match true { true => R {i: 100}, _ => fail!() };
assert!((rs.i == 100));
}
enum mood { happy, sad, }
impl cmp::Eq for mood {
fn eq(&self, other: &mood) -> bool {
((*self) as uint) == ((*other) as uint)
}
fn ne(&self, other: &mood) -> bool { !(*self).eq(other) }
}
fn test_tag() {
let rs = match true { true => { happy } false => { sad } };
assert!((rs == happy));
}
pub fn main()
|
{ test_rec(); test_tag(); }
|
identifier_body
|
|
expr-alt-struct.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// -*- rust -*-
// Tests for match as expressions resulting in struct types
struct R { i: int }
fn test_rec() {
let rs = match true { true => R {i: 100}, _ => fail!() };
assert!((rs.i == 100));
}
enum mood { happy, sad, }
impl cmp::Eq for mood {
fn eq(&self, other: &mood) -> bool {
((*self) as uint) == ((*other) as uint)
}
fn ne(&self, other: &mood) -> bool { !(*self).eq(other) }
}
fn test_tag() {
let rs = match true { true => { happy } false => { sad } };
assert!((rs == happy));
}
pub fn main() { test_rec(); test_tag(); }
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
random_line_split
|
container.rs
|
#![macro_use]
pub trait FromEcs<E: ContainsSystem> where Self: Sized {
fn from_ecs(ecs: &E) -> &Self;
}
pub trait FromEcsMut<E: ContainsMutSystem>: FromEcs<E> {
fn from_ecs_mut(ecs: &mut E) -> &mut Self;
}
pub trait ContainsSystem where Self: Sized {
fn get_system<S>(&self) -> &S
where S: FromEcs<Self>;
}
pub trait ContainsMutSystem: ContainsSystem {
fn get_system_mut<S>(&mut self) -> &mut S
where S: FromEcsMut<Self>;
}
#[macro_export]
macro_rules! create_container {(
with_systems {
$($sys_id:ident => $sys_type:ty = $cmp_type:ty),+
},
with_updaters {
$($upd_id:ident updates $upd_sys_id:ident => $upd_type:ty),+
}
) => (
|
pub struct EcsContainer {
pub entity_factory: EntityFactory,
$(pub $sys_id: $sys_type,)+
$(pub $upd_id: $upd_type,)+
}
impl EcsContainer {
pub fn new_entity(&mut self) -> EntityConfiguration<Self> {
let entity = self.entity_factory.new_entity();
self.configure_entity(entity)
}
pub fn configure_entity(&mut self, entity: Entity) -> EntityConfiguration<Self> {
EntityConfiguration::new(self, entity)
}
pub fn update(&mut self, dt: f64) {
$(
{
let res = self.$upd_id.update(&self.$upd_sys_id, &self, dt);
res.post_update(self);
}
)+
}
}
impl ContainsSystem for EcsContainer {
fn get_system<S>(&self) -> &S
where S: FromEcs<Self> {
FromEcs::from_ecs(self)
}
}
impl ContainsMutSystem for EcsContainer {
fn get_system_mut<S>(&mut self) -> &mut S
where S: FromEcsMut<Self> {
FromEcsMut::from_ecs_mut(self)
}
}
$(
impl FromEcs<EcsContainer> for $sys_type {
fn from_ecs(ecs: &EcsContainer) -> &$sys_type {
&ecs.$sys_id
}
}
impl FromEcsMut<EcsContainer> for $sys_type {
fn from_ecs_mut(ecs: &mut EcsContainer) -> &mut $sys_type {
&mut ecs.$sys_id
}
}
impl<'a> ConfiguresComponent<$cmp_type> for EntityConfiguration<'a, EcsContainer> {
fn with_component(self, component: $cmp_type) -> Self {
self.with_component_for_system::<$cmp_type, $sys_type>(component)
}
}
)+)
}
|
pub trait ConfiguresComponent<C> {
fn with_component(self, component: C) -> Self;
}
|
random_line_split
|
location.rs
|
header! {
/// `Location` header, defined in
/// [RFC7231](http://tools.ietf.org/html/rfc7231#section-7.1.2)
///
/// The `Location` header field is used in some responses to refer to a
/// specific resource in relation to the response. The type of
/// relationship is defined by the combination of request method and
/// status code semantics.
///
/// # ABNF
/// ```plain
/// Location = URI-reference
/// ```
///
/// # Example values
/// * `/People.html#tim`
/// * `http://www.example.net/index.html`
///
/// # Examples
/// ```
/// use hyper::header::{Headers, Location};
///
/// let mut headers = Headers::new();
/// headers.set(Location::new("/People.html#tim"));
/// ```
/// ```
/// use hyper::header::{Headers, Location};
///
/// let mut headers = Headers::new();
/// headers.set(Location::new("http://www.example.com/index.html"));
/// ```
// TODO: Use URL
(Location, "Location") => Cow[str]
test_location {
|
test_header!(test2, vec![b"http://www.example.net/index.html"]);
}
}
bench_header!(bench, Location, { vec![b"http://foo.com/hello:3000".to_vec()] });
|
// Testcase from RFC
test_header!(test1, vec![b"/People.html#tim"]);
|
random_line_split
|
domrect.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMRectBinding;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DOMRectReadOnlyBinding::DOMRectReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::domrectreadonly::DOMRectReadOnly;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct DOMRect {
rect: DOMRectReadOnly,
}
impl DOMRect {
fn new_inherited(x: f64, y: f64, width: f64, height: f64) -> DOMRect {
DOMRect {
rect: DOMRectReadOnly::new_inherited(x, y, width, height),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, width: f64, height: f64) -> DomRoot<DOMRect> {
reflect_dom_object(Box::new(DOMRect::new_inherited(x, y, width, height)),
global,
DOMRectBinding::Wrap)
}
pub fn Constructor(global: &GlobalScope,
x: f64,
y: f64,
width: f64,
height: f64)
-> Fallible<DomRoot<DOMRect>> {
Ok(DOMRect::new(global, x, y, width, height))
}
}
impl DOMRectMethods for DOMRect {
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn X(&self) -> f64 {
self.rect.X()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn SetX(&self, value: f64) {
self.rect.set_x(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn Y(&self) -> f64 {
self.rect.Y()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn SetY(&self, value: f64) {
self.rect.set_y(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn Width(&self) -> f64 {
self.rect.Width()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn SetWidth(&self, value: f64) {
self.rect.set_width(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn Height(&self) -> f64 {
self.rect.Height()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn SetHeight(&self, value: f64)
|
}
|
{
self.rect.set_height(value);
}
|
identifier_body
|
domrect.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMRectBinding;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DOMRectReadOnlyBinding::DOMRectReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::domrectreadonly::DOMRectReadOnly;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct DOMRect {
rect: DOMRectReadOnly,
}
impl DOMRect {
fn new_inherited(x: f64, y: f64, width: f64, height: f64) -> DOMRect {
DOMRect {
rect: DOMRectReadOnly::new_inherited(x, y, width, height),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, width: f64, height: f64) -> DomRoot<DOMRect> {
reflect_dom_object(Box::new(DOMRect::new_inherited(x, y, width, height)),
global,
DOMRectBinding::Wrap)
}
pub fn Constructor(global: &GlobalScope,
x: f64,
y: f64,
width: f64,
height: f64)
-> Fallible<DomRoot<DOMRect>> {
Ok(DOMRect::new(global, x, y, width, height))
}
}
impl DOMRectMethods for DOMRect {
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn X(&self) -> f64 {
self.rect.X()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn SetX(&self, value: f64) {
self.rect.set_x(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn Y(&self) -> f64 {
self.rect.Y()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn SetY(&self, value: f64) {
self.rect.set_y(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn
|
(&self) -> f64 {
self.rect.Width()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn SetWidth(&self, value: f64) {
self.rect.set_width(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn Height(&self) -> f64 {
self.rect.Height()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn SetHeight(&self, value: f64) {
self.rect.set_height(value);
}
}
|
Width
|
identifier_name
|
domrect.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMRectBinding;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DOMRectReadOnlyBinding::DOMRectReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::domrectreadonly::DOMRectReadOnly;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
#[dom_struct]
pub struct DOMRect {
|
impl DOMRect {
fn new_inherited(x: f64, y: f64, width: f64, height: f64) -> DOMRect {
DOMRect {
rect: DOMRectReadOnly::new_inherited(x, y, width, height),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, width: f64, height: f64) -> DomRoot<DOMRect> {
reflect_dom_object(Box::new(DOMRect::new_inherited(x, y, width, height)),
global,
DOMRectBinding::Wrap)
}
pub fn Constructor(global: &GlobalScope,
x: f64,
y: f64,
width: f64,
height: f64)
-> Fallible<DomRoot<DOMRect>> {
Ok(DOMRect::new(global, x, y, width, height))
}
}
impl DOMRectMethods for DOMRect {
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn X(&self) -> f64 {
self.rect.X()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-x
fn SetX(&self, value: f64) {
self.rect.set_x(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn Y(&self) -> f64 {
self.rect.Y()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-y
fn SetY(&self, value: f64) {
self.rect.set_y(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn Width(&self) -> f64 {
self.rect.Width()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-width
fn SetWidth(&self, value: f64) {
self.rect.set_width(value);
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn Height(&self) -> f64 {
self.rect.Height()
}
// https://drafts.fxtf.org/geometry/#dom-domrect-height
fn SetHeight(&self, value: f64) {
self.rect.set_height(value);
}
}
|
rect: DOMRectReadOnly,
}
|
random_line_split
|
woff_metadata_license_generated.rs
|
// automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
pub enum WoffMetadataLicenseOffset {}
#[derive(Copy, Clone, PartialEq)]
|
pub struct WoffMetadataLicense<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for WoffMetadataLicense<'a> {
type Inner = WoffMetadataLicense<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table { buf, loc } }
}
}
impl<'a> WoffMetadataLicense<'a> {
pub const VT_URL: flatbuffers::VOffsetT = 4;
pub const VT_ID: flatbuffers::VOffsetT = 6;
pub const VT_TEXT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WoffMetadataLicense { _tab: table }
}
#[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args WoffMetadataLicenseArgs<'args>
) -> flatbuffers::WIPOffset<WoffMetadataLicense<'bldr>> {
let mut builder = WoffMetadataLicenseBuilder::new(_fbb);
if let Some(x) = args.text { builder.add_text(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.url { builder.add_url(x); }
builder.finish()
}
#[inline]
pub fn url(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_URL, None)
}
#[inline]
pub fn id(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_ID, None)
}
#[inline]
pub fn text(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>> {
self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>(WoffMetadataLicense::VT_TEXT, None)
}
}
impl flatbuffers::Verifiable for WoffMetadataLicense<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("url", Self::VT_URL, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>("text", Self::VT_TEXT, false)?
.finish();
Ok(())
}
}
pub struct WoffMetadataLicenseArgs<'a> {
pub url: Option<flatbuffers::WIPOffset<&'a str>>,
pub id: Option<flatbuffers::WIPOffset<&'a str>>,
pub text: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>>>,
}
impl<'a> Default for WoffMetadataLicenseArgs<'a> {
#[inline]
fn default() -> Self {
WoffMetadataLicenseArgs {
url: None,
id: None,
text: None,
}
}
}
pub struct WoffMetadataLicenseBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> WoffMetadataLicenseBuilder<'a, 'b> {
#[inline]
pub fn add_url(&mut self, url: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_URL, url);
}
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_ID, id);
}
#[inline]
pub fn add_text(&mut self, text: flatbuffers::WIPOffset<flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<WoffMetadataText<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_TEXT, text);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WoffMetadataLicenseBuilder<'a, 'b> {
let start = _fbb.start_table();
WoffMetadataLicenseBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<WoffMetadataLicense<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl std::fmt::Debug for WoffMetadataLicense<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut ds = f.debug_struct("WoffMetadataLicense");
ds.field("url", &self.url());
ds.field("id", &self.id());
ds.field("text", &self.text());
ds.finish()
}
}
|
random_line_split
|
|
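The generated table in the row above follows the standard FlatBuffers builder pattern: serialize the strings first, then the table, then finish the buffer. A hedged sketch (it assumes the generated types above are in scope; the license URL is a made-up example value):

```rust
// Hedged sketch of building the generated WoffMetadataLicense table.
// Assumes WoffMetadataLicense and WoffMetadataLicenseArgs from the row
// above are in scope; the license URL is a made-up example value.
extern crate flatbuffers;

fn build_license<'a>(fbb: &mut flatbuffers::FlatBufferBuilder<'a>) {
    // Strings must be written before the table that references them.
    let url = fbb.create_string("https://example.com/license");
    let license = WoffMetadataLicense::create(
        fbb,
        &WoffMetadataLicenseArgs {
            url: Some(url),
            id: None,
            text: None,
        },
    );
    // Mark the table as the root object of the buffer.
    fbb.finish(license, None);
}
```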
woff_metadata_license_generated.rs
|
// automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
pub enum WoffMetadataLicenseOffset {}
#[derive(Copy, Clone, PartialEq)]
pub struct WoffMetadataLicense<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for WoffMetadataLicense<'a> {
type Inner = WoffMetadataLicense<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table { buf, loc } }
}
}
impl<'a> WoffMetadataLicense<'a> {
pub const VT_URL: flatbuffers::VOffsetT = 4;
pub const VT_ID: flatbuffers::VOffsetT = 6;
pub const VT_TEXT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WoffMetadataLicense { _tab: table }
}
#[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args WoffMetadataLicenseArgs<'args>
) -> flatbuffers::WIPOffset<WoffMetadataLicense<'bldr>> {
let mut builder = WoffMetadataLicenseBuilder::new(_fbb);
if let Some(x) = args.text { builder.add_text(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.url
|
builder.finish()
}
#[inline]
pub fn url(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_URL, None)
}
#[inline]
pub fn id(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_ID, None)
}
#[inline]
pub fn text(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>> {
self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>(WoffMetadataLicense::VT_TEXT, None)
}
}
impl flatbuffers::Verifiable for WoffMetadataLicense<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("url", Self::VT_URL, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>("text", Self::VT_TEXT, false)?
.finish();
Ok(())
}
}
pub struct WoffMetadataLicenseArgs<'a> {
pub url: Option<flatbuffers::WIPOffset<&'a str>>,
pub id: Option<flatbuffers::WIPOffset<&'a str>>,
pub text: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>>>,
}
impl<'a> Default for WoffMetadataLicenseArgs<'a> {
#[inline]
fn default() -> Self {
WoffMetadataLicenseArgs {
url: None,
id: None,
text: None,
}
}
}
pub struct WoffMetadataLicenseBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> WoffMetadataLicenseBuilder<'a, 'b> {
#[inline]
pub fn add_url(&mut self, url: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_URL, url);
}
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_ID, id);
}
#[inline]
pub fn add_text(&mut self, text: flatbuffers::WIPOffset<flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<WoffMetadataText<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_TEXT, text);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WoffMetadataLicenseBuilder<'a, 'b> {
let start = _fbb.start_table();
WoffMetadataLicenseBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<WoffMetadataLicense<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl std::fmt::Debug for WoffMetadataLicense<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut ds = f.debug_struct("WoffMetadataLicense");
ds.field("url", &self.url());
ds.field("id", &self.id());
ds.field("text", &self.text());
ds.finish()
}
}
|
{ builder.add_url(x); }
|
conditional_block
|
woff_metadata_license_generated.rs
|
// automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
pub enum WoffMetadataLicenseOffset {}
#[derive(Copy, Clone, PartialEq)]
pub struct WoffMetadataLicense<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for WoffMetadataLicense<'a> {
type Inner = WoffMetadataLicense<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table { buf, loc } }
}
}
impl<'a> WoffMetadataLicense<'a> {
pub const VT_URL: flatbuffers::VOffsetT = 4;
pub const VT_ID: flatbuffers::VOffsetT = 6;
pub const VT_TEXT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WoffMetadataLicense { _tab: table }
}
#[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args WoffMetadataLicenseArgs<'args>
) -> flatbuffers::WIPOffset<WoffMetadataLicense<'bldr>> {
let mut builder = WoffMetadataLicenseBuilder::new(_fbb);
if let Some(x) = args.text { builder.add_text(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.url { builder.add_url(x); }
builder.finish()
}
#[inline]
pub fn url(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_URL, None)
}
#[inline]
pub fn id(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_ID, None)
}
#[inline]
pub fn text(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>> {
self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>(WoffMetadataLicense::VT_TEXT, None)
}
}
impl flatbuffers::Verifiable for WoffMetadataLicense<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer>
|
}
pub struct WoffMetadataLicenseArgs<'a> {
pub url: Option<flatbuffers::WIPOffset<&'a str>>,
pub id: Option<flatbuffers::WIPOffset<&'a str>>,
pub text: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>>>,
}
impl<'a> Default for WoffMetadataLicenseArgs<'a> {
#[inline]
fn default() -> Self {
WoffMetadataLicenseArgs {
url: None,
id: None,
text: None,
}
}
}
pub struct WoffMetadataLicenseBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> WoffMetadataLicenseBuilder<'a, 'b> {
#[inline]
pub fn add_url(&mut self, url: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_URL, url);
}
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_ID, id);
}
#[inline]
pub fn add_text(&mut self, text: flatbuffers::WIPOffset<flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<WoffMetadataText<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_TEXT, text);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WoffMetadataLicenseBuilder<'a, 'b> {
let start = _fbb.start_table();
WoffMetadataLicenseBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<WoffMetadataLicense<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl std::fmt::Debug for WoffMetadataLicense<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut ds = f.debug_struct("WoffMetadataLicense");
ds.field("url", &self.url());
ds.field("id", &self.id());
ds.field("text", &self.text());
ds.finish()
}
}
|
{
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("url", Self::VT_URL, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>("text", Self::VT_TEXT, false)?
.finish();
Ok(())
}
|
identifier_body
|
woff_metadata_license_generated.rs
|
// automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
pub enum WoffMetadataLicenseOffset {}
#[derive(Copy, Clone, PartialEq)]
pub struct WoffMetadataLicense<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for WoffMetadataLicense<'a> {
type Inner = WoffMetadataLicense<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table { buf, loc } }
}
}
impl<'a> WoffMetadataLicense<'a> {
pub const VT_URL: flatbuffers::VOffsetT = 4;
pub const VT_ID: flatbuffers::VOffsetT = 6;
pub const VT_TEXT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WoffMetadataLicense { _tab: table }
}
#[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args WoffMetadataLicenseArgs<'args>
) -> flatbuffers::WIPOffset<WoffMetadataLicense<'bldr>> {
let mut builder = WoffMetadataLicenseBuilder::new(_fbb);
if let Some(x) = args.text { builder.add_text(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.url { builder.add_url(x); }
builder.finish()
}
#[inline]
pub fn url(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_URL, None)
}
#[inline]
pub fn id(&self) -> Option<&'a str> {
self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(WoffMetadataLicense::VT_ID, None)
}
#[inline]
pub fn text(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>> {
self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>(WoffMetadataLicense::VT_TEXT, None)
}
}
impl flatbuffers::Verifiable for WoffMetadataLicense<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("url", Self::VT_URL, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<WoffMetadataText>>>>("text", Self::VT_TEXT, false)?
.finish();
Ok(())
}
}
pub struct WoffMetadataLicenseArgs<'a> {
pub url: Option<flatbuffers::WIPOffset<&'a str>>,
pub id: Option<flatbuffers::WIPOffset<&'a str>>,
pub text: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<WoffMetadataText<'a>>>>>,
}
impl<'a> Default for WoffMetadataLicenseArgs<'a> {
#[inline]
fn default() -> Self {
WoffMetadataLicenseArgs {
url: None,
id: None,
text: None,
}
}
}
pub struct WoffMetadataLicenseBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> WoffMetadataLicenseBuilder<'a, 'b> {
#[inline]
pub fn add_url(&mut self, url: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_URL, url);
}
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_ID, id);
}
#[inline]
pub fn add_text(&mut self, text: flatbuffers::WIPOffset<flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<WoffMetadataText<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(WoffMetadataLicense::VT_TEXT, text);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WoffMetadataLicenseBuilder<'a, 'b> {
let start = _fbb.start_table();
WoffMetadataLicenseBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn
|
(self) -> flatbuffers::WIPOffset<WoffMetadataLicense<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl std::fmt::Debug for WoffMetadataLicense<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut ds = f.debug_struct("WoffMetadataLicense");
ds.field("url", &self.url());
ds.field("id", &self.id());
ds.field("text", &self.text());
ds.finish()
}
}
|
finish
|
identifier_name
|
managed-pointer-within-unique.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-android: FIXME(#10381)
#[feature(managed_boxes)];
// compile-flags:-Z extra-debug-info
// debugger:set print pretty off
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print *ordinary_unique
// check:$1 = {-1, -2}
// debugger:print managed_within_unique.val->x
// check:$2 = -3
// debugger:print managed_within_unique.val->y->val
// check:$3 = -4
#[allow(unused_variable)];
struct ContainsManaged
{
x: int,
y: @int
}
fn main() {
let ordinary_unique = ~(-1, -2);
    // This is a special case: normally, values allocated in the exchange heap are not boxed
    // unless they contain managed pointers.
    // This test case verifies that both cases are handled correctly.
let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 };
zzz();
}
fn
|
() {()}
|
zzz
|
identifier_name
|
managed-pointer-within-unique.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
// compile-flags:-Z extra-debug-info
// debugger:set print pretty off
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print *ordinary_unique
// check:$1 = {-1, -2}
// debugger:print managed_within_unique.val->x
// check:$2 = -3
// debugger:print managed_within_unique.val->y->val
// check:$3 = -4
#[allow(unused_variable)];
struct ContainsManaged
{
x: int,
y: @int
}
fn main() {
let ordinary_unique = ~(-1, -2);
    // This is a special case: normally, values allocated in the exchange heap are not boxed
    // unless they contain managed pointers.
    // This test case verifies that both cases are handled correctly.
let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 };
zzz();
}
fn zzz() {()}
|
// xfail-android: FIXME(#10381)
#[feature(managed_boxes)];
|
random_line_split
|
translation.rs
|
use register::{Word, SysResult, Original, Modified, StackPointer};
use kernel::{enter, exit};
use process::proot::InfoBag;
use process::tracee::{TraceeStatus, TraceeRestartMethod, Tracee};
pub trait SyscallTranslator {
fn translate_syscall(&mut self, info_bag: &InfoBag);
fn translate_syscall_enter(&mut self, info_bag: &InfoBag);
fn translate_syscall_exit(&mut self);
}
impl SyscallTranslator for Tracee {
/// Retrieves the registers,
/// handles either the enter or exit stage of the system call,
/// and pushes the registers.
fn translate_syscall(&mut self, info_bag: &InfoBag) {
if let Err(error) = self.regs.fetch_regs() {
eprintln!("proot error: Error while fetching regs: {}", error);
return;
}
match self.status {
TraceeStatus::SysEnter => self.translate_syscall_enter(info_bag),
TraceeStatus::SysExit |
TraceeStatus::Error(_) => self.translate_syscall_exit()
};
if let Err(error) = self.regs.push_regs() {
eprintln!("proot error: Error while pushing regs: {}", error);
}
}
fn translate_syscall_enter(&mut self, info_bag: &InfoBag) {
// Never restore original register values at the end of this stage.
self.regs.set_restore_original_regs(false);
// Saving the original registers here.
        // They are essential for restoring the regs after the exit stage,
        // and they also serve as a record of the original values (like
        // the syscall number, in case it is changed during the enter stage).
self.regs.save_current_regs(Original);
//TODO: notify extensions for SYSCALL_ENTER_START
// status = notify_extensions(tracee, SYSCALL_ENTER_START, 0, 0);
// if (status < 0)
// goto end;
// if (status > 0)
// return 0;
let status = enter::translate(info_bag, self);
//TODO: notify extensions for SYSCALL_ENTER_END event
// status2 = notify_extensions(tracee, SYSCALL_ENTER_END, status, 0);
// if (status2 < 0)
// status = status2;
// Saving the registers potentially modified by the translation.
// It's useful in order to know what the translation did to the registers.
self.regs.save_current_regs(Modified);
// In case of error reported by the translation/extension,
// remember the tracee status for the "exit" stage and avoid
// the actual syscall.
if status.is_err() {
self.regs.cancel_syscall("following error during enter stage, avoid syscall");
self.regs.set(SysResult,
status.unwrap_err().get_errno() as Word,
"following error during enter stage, remember errno for exit stage",
);
self.status = TraceeStatus::Error(status.unwrap_err());
} else {
self.status = TraceeStatus::SysExit;
}
// Restore tracee's stack pointer now if it won't hit
// the sysexit stage (i.e. when seccomp is enabled and
// there's nothing else to do).
if self.restart_how == TraceeRestartMethod::WithoutExitStage {
self.status = TraceeStatus::SysEnter;
self.regs.restore_original(
StackPointer,
"following enter stage, restoring stack pointer early because no exit stage"
);
}
}
|
// status = notify_extensions(tracee, SYSCALL_EXIT_START, 0, 0);
// if (status < 0) {
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// goto end;
// }
// if (status > 0)
// return;
if self.status.is_ok() {
exit::translate(self);
} else {
self.regs.set(
SysResult,
self.status.get_errno() as Word,
"following previous error in enter stage, setting errno",
);
}
//TODO: notify extensions for SYSCALL_EXIT_END event
// status = notify_extensions(tracee, SYSCALL_EXIT_END, 0, 0);
// if (status < 0)
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// reset the tracee's status
self.status = TraceeStatus::SysEnter;
}
}
|
fn translate_syscall_exit(&mut self) {
// By default, restore original register values at the end of this stage.
self.regs.set_restore_original_regs(true);
//TODO: notify extensions for SYSCALL_EXIT_START event
|
random_line_split
|
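The doc comment and inline comments in the row above describe a two-stage translation loop: on SysEnter the arguments are translated and the exit stage is armed, on SysExit (or error) the result is fixed up and the status resets. As a self-contained, hedged sketch (the types below are illustrative stand-ins, not the proot-rs Tracee/TraceeStatus types), the state machine reduces to:

```rust
// Minimal illustration of the enter/exit dispatch described above.
// Stage and Machine are stand-ins, not types from the source.
#[derive(Debug, PartialEq)]
enum Stage {
    SysEnter,
    SysExit,
}

struct Machine {
    stage: Stage,
}

impl Machine {
    fn step(&mut self) {
        match self.stage {
            // Enter stage: translate the arguments, then arm the exit stage.
            Stage::SysEnter => self.stage = Stage::SysExit,
            // Exit stage: translate the result, then re-arm the enter stage.
            Stage::SysExit => self.stage = Stage::SysEnter,
        }
    }
}

fn main() {
    let mut m = Machine { stage: Stage::SysEnter };
    m.step();
    assert_eq!(m.stage, Stage::SysExit);
    m.step();
    assert_eq!(m.stage, Stage::SysEnter);
}
```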
translation.rs
|
use register::{Word, SysResult, Original, Modified, StackPointer};
use kernel::{enter, exit};
use process::proot::InfoBag;
use process::tracee::{TraceeStatus, TraceeRestartMethod, Tracee};
pub trait SyscallTranslator {
fn translate_syscall(&mut self, info_bag: &InfoBag);
fn translate_syscall_enter(&mut self, info_bag: &InfoBag);
fn translate_syscall_exit(&mut self);
}
impl SyscallTranslator for Tracee {
/// Retrieves the registers,
/// handles either the enter or exit stage of the system call,
/// and pushes the registers.
fn translate_syscall(&mut self, info_bag: &InfoBag) {
if let Err(error) = self.regs.fetch_regs() {
eprintln!("proot error: Error while fetching regs: {}", error);
return;
}
match self.status {
TraceeStatus::SysEnter => self.translate_syscall_enter(info_bag),
TraceeStatus::SysExit |
TraceeStatus::Error(_) => self.translate_syscall_exit()
};
if let Err(error) = self.regs.push_regs() {
eprintln!("proot error: Error while pushing regs: {}", error);
}
}
fn translate_syscall_enter(&mut self, info_bag: &InfoBag) {
// Never restore original register values at the end of this stage.
self.regs.set_restore_original_regs(false);
// Saving the original registers here.
        // They are essential for restoring the regs after the exit stage,
        // and they also serve as a record of the original values (like
        // the syscall number, in case it is changed during the enter stage).
self.regs.save_current_regs(Original);
//TODO: notify extensions for SYSCALL_ENTER_START
// status = notify_extensions(tracee, SYSCALL_ENTER_START, 0, 0);
// if (status < 0)
// goto end;
// if (status > 0)
// return 0;
let status = enter::translate(info_bag, self);
//TODO: notify extensions for SYSCALL_ENTER_END event
// status2 = notify_extensions(tracee, SYSCALL_ENTER_END, status, 0);
// if (status2 < 0)
// status = status2;
// Saving the registers potentially modified by the translation.
// It's useful in order to know what the translation did to the registers.
self.regs.save_current_regs(Modified);
// In case of error reported by the translation/extension,
// remember the tracee status for the "exit" stage and avoid
// the actual syscall.
if status.is_err() {
self.regs.cancel_syscall("following error during enter stage, avoid syscall");
self.regs.set(SysResult,
status.unwrap_err().get_errno() as Word,
"following error during enter stage, remember errno for exit stage",
);
self.status = TraceeStatus::Error(status.unwrap_err());
} else {
self.status = TraceeStatus::SysExit;
}
// Restore tracee's stack pointer now if it won't hit
// the sysexit stage (i.e. when seccomp is enabled and
// there's nothing else to do).
if self.restart_how == TraceeRestartMethod::WithoutExitStage {
self.status = TraceeStatus::SysEnter;
self.regs.restore_original(
StackPointer,
"following enter stage, restoring stack pointer early because no exit stage"
);
}
}
fn translate_syscall_exit(&mut self) {
// By default, restore original register values at the end of this stage.
self.regs.set_restore_original_regs(true);
//TODO: notify extensions for SYSCALL_EXIT_START event
// status = notify_extensions(tracee, SYSCALL_EXIT_START, 0, 0);
// if (status < 0) {
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// goto end;
// }
// if (status > 0)
// return;
if self.status.is_ok() {
exit::translate(self);
} else
|
//TODO: notify extensions for SYSCALL_EXIT_END event
// status = notify_extensions(tracee, SYSCALL_EXIT_END, 0, 0);
// if (status < 0)
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// reset the tracee's status
self.status = TraceeStatus::SysEnter;
}
}
|
{
self.regs.set(
SysResult,
self.status.get_errno() as Word,
"following previous error in enter stage, setting errno",
);
}
|
conditional_block
|
translation.rs
|
use register::{Word, SysResult, Original, Modified, StackPointer};
use kernel::{enter, exit};
use process::proot::InfoBag;
use process::tracee::{TraceeStatus, TraceeRestartMethod, Tracee};
pub trait SyscallTranslator {
fn translate_syscall(&mut self, info_bag: &InfoBag);
fn translate_syscall_enter(&mut self, info_bag: &InfoBag);
fn translate_syscall_exit(&mut self);
}
impl SyscallTranslator for Tracee {
/// Retrieves the registers,
/// handles either the enter or exit stage of the system call,
/// and pushes the registers.
fn translate_syscall(&mut self, info_bag: &InfoBag) {
if let Err(error) = self.regs.fetch_regs() {
eprintln!("proot error: Error while fetching regs: {}", error);
return;
}
match self.status {
TraceeStatus::SysEnter => self.translate_syscall_enter(info_bag),
TraceeStatus::SysExit |
TraceeStatus::Error(_) => self.translate_syscall_exit()
};
if let Err(error) = self.regs.push_regs() {
eprintln!("proot error: Error while pushing regs: {}", error);
}
}
fn translate_syscall_enter(&mut self, info_bag: &InfoBag)
|
// status2 = notify_extensions(tracee, SYSCALL_ENTER_END, status, 0);
// if (status2 < 0)
// status = status2;
// Saving the registers potentially modified by the translation.
// It's useful in order to know what the translation did to the registers.
self.regs.save_current_regs(Modified);
// In case of error reported by the translation/extension,
// remember the tracee status for the "exit" stage and avoid
// the actual syscall.
if status.is_err() {
self.regs.cancel_syscall("following error during enter stage, avoid syscall");
self.regs.set(SysResult,
status.unwrap_err().get_errno() as Word,
"following error during enter stage, remember errno for exit stage",
);
self.status = TraceeStatus::Error(status.unwrap_err());
} else {
self.status = TraceeStatus::SysExit;
}
// Restore tracee's stack pointer now if it won't hit
// the sysexit stage (i.e. when seccomp is enabled and
// there's nothing else to do).
if self.restart_how == TraceeRestartMethod::WithoutExitStage {
self.status = TraceeStatus::SysEnter;
self.regs.restore_original(
StackPointer,
"following enter stage, restoring stack pointer early because no exit stage"
);
}
}
fn translate_syscall_exit(&mut self) {
// By default, restore original register values at the end of this stage.
self.regs.set_restore_original_regs(true);
//TODO: notify extensions for SYSCALL_EXIT_START event
// status = notify_extensions(tracee, SYSCALL_EXIT_START, 0, 0);
// if (status < 0) {
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// goto end;
// }
// if (status > 0)
// return;
if self.status.is_ok() {
exit::translate(self);
} else {
self.regs.set(
SysResult,
self.status.get_errno() as Word,
"following previous error in enter stage, setting errno",
);
}
//TODO: notify extensions for SYSCALL_EXIT_END event
// status = notify_extensions(tracee, SYSCALL_EXIT_END, 0, 0);
// if (status < 0)
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// reset the tracee's status
self.status = TraceeStatus::SysEnter;
}
}
|
{
// Never restore original register values at the end of this stage.
self.regs.set_restore_original_regs(false);
// Saving the original registers here.
        // They are essential for restoring the regs after the exit stage,
        // and they also serve as a record of the original values (like
        // the syscall number, in case it is changed during the enter stage).
self.regs.save_current_regs(Original);
//TODO: notify extensions for SYSCALL_ENTER_START
// status = notify_extensions(tracee, SYSCALL_ENTER_START, 0, 0);
// if (status < 0)
// goto end;
// if (status > 0)
// return 0;
let status = enter::translate(info_bag, self);
//TODO: notify extensions for SYSCALL_ENTER_END event
|
identifier_body
|
translation.rs
|
use register::{Word, SysResult, Original, Modified, StackPointer};
use kernel::{enter, exit};
use process::proot::InfoBag;
use process::tracee::{TraceeStatus, TraceeRestartMethod, Tracee};
pub trait SyscallTranslator {
fn translate_syscall(&mut self, info_bag: &InfoBag);
fn translate_syscall_enter(&mut self, info_bag: &InfoBag);
fn translate_syscall_exit(&mut self);
}
impl SyscallTranslator for Tracee {
/// Retrieves the registers,
/// handles either the enter or exit stage of the system call,
/// and pushes the registers.
fn
|
(&mut self, info_bag: &InfoBag) {
if let Err(error) = self.regs.fetch_regs() {
eprintln!("proot error: Error while fetching regs: {}", error);
return;
}
match self.status {
TraceeStatus::SysEnter => self.translate_syscall_enter(info_bag),
TraceeStatus::SysExit |
TraceeStatus::Error(_) => self.translate_syscall_exit()
};
if let Err(error) = self.regs.push_regs() {
eprintln!("proot error: Error while pushing regs: {}", error);
}
}
fn translate_syscall_enter(&mut self, info_bag: &InfoBag) {
// Never restore original register values at the end of this stage.
self.regs.set_restore_original_regs(false);
// Saving the original registers here.
        // They are essential for restoring the regs after the exit stage,
        // and they also serve as a record of the original values (like
        // the syscall number, in case it is changed during the enter stage).
self.regs.save_current_regs(Original);
//TODO: notify extensions for SYSCALL_ENTER_START
// status = notify_extensions(tracee, SYSCALL_ENTER_START, 0, 0);
// if (status < 0)
// goto end;
// if (status > 0)
// return 0;
let status = enter::translate(info_bag, self);
//TODO: notify extensions for SYSCALL_ENTER_END event
// status2 = notify_extensions(tracee, SYSCALL_ENTER_END, status, 0);
// if (status2 < 0)
// status = status2;
// Saving the registers potentially modified by the translation.
// It's useful in order to know what the translation did to the registers.
self.regs.save_current_regs(Modified);
// In case of error reported by the translation/extension,
// remember the tracee status for the "exit" stage and avoid
// the actual syscall.
if status.is_err() {
self.regs.cancel_syscall("following error during enter stage, avoid syscall");
self.regs.set(SysResult,
status.unwrap_err().get_errno() as Word,
"following error during enter stage, remember errno for exit stage",
);
self.status = TraceeStatus::Error(status.unwrap_err());
} else {
self.status = TraceeStatus::SysExit;
}
// Restore tracee's stack pointer now if it won't hit
// the sysexit stage (i.e. when seccomp is enabled and
// there's nothing else to do).
if self.restart_how == TraceeRestartMethod::WithoutExitStage {
self.status = TraceeStatus::SysEnter;
self.regs.restore_original(
StackPointer,
"following enter stage, restoring stack pointer early because no exit stage"
);
}
}
fn translate_syscall_exit(&mut self) {
// By default, restore original register values at the end of this stage.
self.regs.set_restore_original_regs(true);
//TODO: notify extensions for SYSCALL_EXIT_START event
// status = notify_extensions(tracee, SYSCALL_EXIT_START, 0, 0);
// if (status < 0) {
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// goto end;
// }
// if (status > 0)
// return;
if self.status.is_ok() {
exit::translate(self);
} else {
self.regs.set(
SysResult,
self.status.get_errno() as Word,
"following previous error in enter stage, setting errno",
);
}
//TODO: notify extensions for SYSCALL_EXIT_END event
// status = notify_extensions(tracee, SYSCALL_EXIT_END, 0, 0);
// if (status < 0)
// poke_reg(tracee, SYSARG_RESULT, (word_t) status);
// reset the tracee's status
self.status = TraceeStatus::SysEnter;
}
}
|
translate_syscall
|
identifier_name
|
main.rs
|
extern crate regex;
extern crate rustc_demangle;
mod dol;
mod assembler;
use std::fs::File;
use std::io::{BufWriter, BufReader};
use std::io::prelude::*;
use assembler::Assembler;
use regex::Regex;
use rustc_demangle::demangle;
use dol::DolFile;
use assembler::Instruction;
use std::env::args;
const FRAMEWORK_MAP: &'static str = include_str!("../framework.map");
const HEADER: &'static str = r".text section layout
Starting Virtual
address Size address
-----------------------";
fn create_framework_map() {
let regex = Regex::new(r".text.(.+)\s*\n*\s*0x(\w+)\s*\n*\s*0x(\w+)\s*\n*\s*.+\((.+)\)")
.unwrap();
let end_removal = Regex::new(r"^(.+E)\.?\d*$").unwrap();
let mut file = BufReader::new(File::open("../../target/intermediate.elf.map").unwrap());
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let mut file = BufWriter::new(File::create("../../target/framework.map").unwrap());
writeln!(file, "{}", HEADER).unwrap();
for captures in regex.captures_iter(&content) {
let mangled = captures.at(1).unwrap().trim();
let mangled = end_removal.captures(mangled).map_or(mangled, |c| c.at(1).unwrap());
let fn_name = demangle(mangled).to_string();
let address = captures.at(2).unwrap();
let length = captures.at(3).unwrap();
let source_file = captures.at(4).unwrap();
let length = u32::from_str_radix(length, 16).unwrap();
let mut fn_name: &str = &fn_name.replace(' ', "_")
.replace("()", "Void")
.replace("(", "Tuple<")
.replace(")", ">");
let fn_name_bytes = fn_name.as_bytes();
if fn_name.len() >= 19 && &fn_name_bytes[fn_name.len() - 19..][..3] == b"::h" {
fn_name = &fn_name[..fn_name.len() - 19];
}
        if address != "00000000"
|
}
write!(file, "{}", FRAMEWORK_MAP).unwrap();
}
fn main() {
let mut asm = String::new();
let _ = File::open("../../src/patch.asm")
.expect("Couldn't find \"src/patch.asm\". If you don't need to patch the dol, just \
create an empty file.")
.read_to_string(&mut asm);
let lines = &asm.lines().collect::<Vec<_>>();
let mut assembler = Assembler::new("../../target/intermediate.elf");
let instructions = &assembler.assemble_all_lines(lines);
let mut intermediate = Vec::new();
let _ = File::open("../../target/intermediate.dol")
.expect("Couldn't find \"target/intermediate.dol\". Did you build the project correctly \
using \"make\"?")
.read_to_end(&mut intermediate);
let intermediate = DolFile::new(&intermediate);
if let Some("cheat") = args().skip(1).next().as_ref().map(|x| x as &str) {
write_cheat(intermediate, instructions);
} else {
let mut original = Vec::new();
let _ = File::open("../../game/original.dol")
.expect("Couldn't find \"game/original.dol\". You need to copy the game's main.dol \
there.")
.read_to_end(&mut original);
let original = DolFile::new(&original);
patch_game(original, intermediate, instructions);
}
}
fn patch_game(original: DolFile, intermediate: DolFile, instructions: &[Instruction]) {
let mut original = original;
original.append(intermediate);
original.patch(instructions);
let data = original.to_bytes();
let mut file = File::create("../../game/sys/main.dol")
.expect("Couldn't create \"game/sys/main.dol\". You might need to provide higher \
privileges.");
file.write(&data).expect("Couldn't write the main.dol");
create_framework_map();
}
fn write_cheat(intermediate: DolFile, instructions: &[Instruction]) {
let mut file = File::create("../../cheat.txt")
.expect("Couldn't create \"cheat.txt\". You might need to provide higher \
privileges.");
writeln!(file, "A8000000 00000001").unwrap();
for instruction in instructions {
writeln!(file,
"{:08X} {:08X}",
(instruction.address & 0x01FFFFFF) | 0x04000000,
instruction.data)
.unwrap();
}
for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
writeln!(file,
"{:08X} {:08X}",
(section.address & 0x01FFFFFF) | 0x06000000,
section.data.len())
.unwrap();
let line_ender = if section.data.len() % 8 > 0 {
8 - (section.data.len() % 8)
} else {
0
};
for (i, byte) in section.data
.iter()
.chain(std::iter::repeat(&0).take(line_ender))
.enumerate() {
if i % 8 == 4 {
write!(file, " ").unwrap();
}
write!(file, "{:02X}", byte).unwrap();
if i % 8 == 7 {
writeln!(file, "").unwrap();
}
}
}
// for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
// let mut address = section.address;
// let line_ender = if section.data.len() % 4 > 0 {
// 4 - (section.data.len() % 4)
// } else {
// 0
// };
// for (i, byte) in section.data.iter().chain(std::iter::repeat(&0).take(line_ender)).enumerate() {
// if i % 4 == 0 {
// write!(file, "{:08X} ", (address & 0x01FFFFFF) | 0x04000000).unwrap();
// }
// write!(file, "{:02X}", byte).unwrap();
// if i % 4 == 3 {
// writeln!(file, "").unwrap();
// }
// address += 1;
// }
// }
}
|
{
writeln!(file,
" 00000000 {:06x} {} 4 {} \t{}",
length,
address,
fn_name,
source_file)
.unwrap();
}
|
conditional_block
|
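`create_framework_map` in the row above relies on `rustc_demangle` to turn the mangled symbol names found in the linker map into readable ones. A minimal, hedged illustration of that single step (the mangled string is a made-up example, not data from the project's map file):

```rust
// Demangling example mirroring the demangle() call used in the row above.
// The symbol is an illustrative legacy-mangled name, not project data.
extern crate rustc_demangle;

use rustc_demangle::demangle;

fn main() {
    let mangled = "_ZN4core3fmt9Formatter3pad17h0123456789abcdefE";
    // demangle() is infallible: names it cannot parse are echoed unchanged.
    println!("{}", demangle(mangled).to_string());
}
```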
main.rs
|
extern crate regex;
extern crate rustc_demangle;
mod dol;
mod assembler;
use std::fs::File;
use std::io::{BufWriter, BufReader};
use std::io::prelude::*;
use assembler::Assembler;
use regex::Regex;
use rustc_demangle::demangle;
use dol::DolFile;
use assembler::Instruction;
use std::env::args;
const FRAMEWORK_MAP: &'static str = include_str!("../framework.map");
const HEADER: &'static str = r".text section layout
Starting Virtual
address Size address
-----------------------";
fn create_framework_map() {
let regex = Regex::new(r".text.(.+)\s*\n*\s*0x(\w+)\s*\n*\s*0x(\w+)\s*\n*\s*.+\((.+)\)")
.unwrap();
let end_removal = Regex::new(r"^(.+E)\.?\d*$").unwrap();
let mut file = BufReader::new(File::open("../../target/intermediate.elf.map").unwrap());
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let mut file = BufWriter::new(File::create("../../target/framework.map").unwrap());
writeln!(file, "{}", HEADER).unwrap();
for captures in regex.captures_iter(&content) {
let mangled = captures.at(1).unwrap().trim();
let mangled = end_removal.captures(mangled).map_or(mangled, |c| c.at(1).unwrap());
let fn_name = demangle(mangled).to_string();
let address = captures.at(2).unwrap();
let length = captures.at(3).unwrap();
let source_file = captures.at(4).unwrap();
let length = u32::from_str_radix(length, 16).unwrap();
let mut fn_name: &str = &fn_name.replace(' ', "_")
.replace("()", "Void")
.replace("(", "Tuple<")
.replace(")", ">");
let fn_name_bytes = fn_name.as_bytes();
if fn_name.len() >= 19 && &fn_name_bytes[fn_name.len() - 19..][..3] == b"::h" {
fn_name = &fn_name[..fn_name.len() - 19];
}
        if address != "00000000" {
writeln!(file,
" 00000000 {:06x} {} 4 {} \t{}",
length,
address,
fn_name,
source_file)
.unwrap();
}
}
write!(file, "{}", FRAMEWORK_MAP).unwrap();
}
fn
|
() {
let mut asm = String::new();
let _ = File::open("../../src/patch.asm")
.expect("Couldn't find \"src/patch.asm\". If you don't need to patch the dol, just \
create an empty file.")
.read_to_string(&mut asm);
let lines = &asm.lines().collect::<Vec<_>>();
let mut assembler = Assembler::new("../../target/intermediate.elf");
let instructions = &assembler.assemble_all_lines(lines);
let mut intermediate = Vec::new();
let _ = File::open("../../target/intermediate.dol")
.expect("Couldn't find \"target/intermediate.dol\". Did you build the project correctly \
using \"make\"?")
.read_to_end(&mut intermediate);
let intermediate = DolFile::new(&intermediate);
if let Some("cheat") = args().skip(1).next().as_ref().map(|x| x as &str) {
write_cheat(intermediate, instructions);
} else {
let mut original = Vec::new();
let _ = File::open("../../game/original.dol")
.expect("Couldn't find \"game/original.dol\". You need to copy the game's main.dol \
there.")
.read_to_end(&mut original);
let original = DolFile::new(&original);
patch_game(original, intermediate, instructions);
}
}
fn patch_game(original: DolFile, intermediate: DolFile, instructions: &[Instruction]) {
let mut original = original;
original.append(intermediate);
original.patch(instructions);
let data = original.to_bytes();
let mut file = File::create("../../game/sys/main.dol")
.expect("Couldn't create \"game/sys/main.dol\". You might need to provide higher \
privileges.");
file.write(&data).expect("Couldn't write the main.dol");
create_framework_map();
}
fn write_cheat(intermediate: DolFile, instructions: &[Instruction]) {
let mut file = File::create("../../cheat.txt")
.expect("Couldn't create \"cheat.txt\". You might need to provide higher \
privileges.");
writeln!(file, "A8000000 00000001").unwrap();
for instruction in instructions {
writeln!(file,
"{:08X} {:08X}",
(instruction.address & 0x01FFFFFF) | 0x04000000,
instruction.data)
.unwrap();
}
for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
writeln!(file,
"{:08X} {:08X}",
(section.address & 0x01FFFFFF) | 0x06000000,
section.data.len())
.unwrap();
let line_ender = if section.data.len() % 8 > 0 {
8 - (section.data.len() % 8)
} else {
0
};
for (i, byte) in section.data
.iter()
.chain(std::iter::repeat(&0).take(line_ender))
.enumerate() {
if i % 8 == 4 {
write!(file, " ").unwrap();
}
write!(file, "{:02X}", byte).unwrap();
if i % 8 == 7 {
writeln!(file, "").unwrap();
}
}
}
// for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
// let mut address = section.address;
// let line_ender = if section.data.len() % 4 > 0 {
// 4 - (section.data.len() % 4)
// } else {
// 0
// };
// for (i, byte) in section.data.iter().chain(std::iter::repeat(&0).take(line_ender)).enumerate() {
// if i % 4 == 0 {
// write!(file, "{:08X} ", (address & 0x01FFFFFF) | 0x04000000).unwrap();
// }
// write!(file, "{:02X}", byte).unwrap();
// if i % 4 == 3 {
// writeln!(file, "").unwrap();
// }
// address += 1;
// }
// }
}
|
main
|
identifier_name
|
main.rs
|
extern crate rustc_demangle;
mod dol;
mod assembler;
use std::fs::File;
use std::io::{BufWriter, BufReader};
use std::io::prelude::*;
use assembler::Assembler;
use regex::Regex;
use rustc_demangle::demangle;
use dol::DolFile;
use assembler::Instruction;
use std::env::args;
const FRAMEWORK_MAP: &'static str = include_str!("../framework.map");
const HEADER: &'static str = r".text section layout
Starting Virtual
address Size address
-----------------------";
fn create_framework_map() {
let regex = Regex::new(r".text.(.+)\s*\n*\s*0x(\w+)\s*\n*\s*0x(\w+)\s*\n*\s*.+\((.+)\)")
.unwrap();
let end_removal = Regex::new(r"^(.+E)\.?\d*$").unwrap();
let mut file = BufReader::new(File::open("../../target/intermediate.elf.map").unwrap());
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let mut file = BufWriter::new(File::create("../../target/framework.map").unwrap());
writeln!(file, "{}", HEADER).unwrap();
for captures in regex.captures_iter(&content) {
let mangled = captures.at(1).unwrap().trim();
let mangled = end_removal.captures(mangled).map_or(mangled, |c| c.at(1).unwrap());
let fn_name = demangle(mangled).to_string();
let address = captures.at(2).unwrap();
let length = captures.at(3).unwrap();
let source_file = captures.at(4).unwrap();
let length = u32::from_str_radix(length, 16).unwrap();
let mut fn_name: &str = &fn_name.replace(' ', "_")
.replace("()", "Void")
.replace("(", "Tuple<")
.replace(")", ">");
let fn_name_bytes = fn_name.as_bytes();
if fn_name.len() >= 19 && &fn_name_bytes[fn_name.len() - 19..][..3] == b"::h" {
fn_name = &fn_name[..fn_name.len() - 19];
}
        if address != "00000000" {
writeln!(file,
" 00000000 {:06x} {} 4 {} \t{}",
length,
address,
fn_name,
source_file)
.unwrap();
}
}
write!(file, "{}", FRAMEWORK_MAP).unwrap();
}
fn main() {
let mut asm = String::new();
let _ = File::open("../../src/patch.asm")
.expect("Couldn't find \"src/patch.asm\". If you don't need to patch the dol, just \
create an empty file.")
.read_to_string(&mut asm);
let lines = &asm.lines().collect::<Vec<_>>();
let mut assembler = Assembler::new("../../target/intermediate.elf");
let instructions = &assembler.assemble_all_lines(lines);
let mut intermediate = Vec::new();
let _ = File::open("../../target/intermediate.dol")
.expect("Couldn't find \"target/intermediate.dol\". Did you build the project correctly \
using \"make\"?")
.read_to_end(&mut intermediate);
let intermediate = DolFile::new(&intermediate);
if let Some("cheat") = args().skip(1).next().as_ref().map(|x| x as &str) {
write_cheat(intermediate, instructions);
} else {
let mut original = Vec::new();
let _ = File::open("../../game/original.dol")
.expect("Couldn't find \"game/original.dol\". You need to copy the game's main.dol \
there.")
.read_to_end(&mut original);
let original = DolFile::new(&original);
patch_game(original, intermediate, instructions);
}
}
fn patch_game(original: DolFile, intermediate: DolFile, instructions: &[Instruction]) {
let mut original = original;
original.append(intermediate);
original.patch(instructions);
let data = original.to_bytes();
let mut file = File::create("../../game/sys/main.dol")
.expect("Couldn't create \"game/sys/main.dol\". You might need to provide higher \
privileges.");
file.write(&data).expect("Couldn't write the main.dol");
create_framework_map();
}
fn write_cheat(intermediate: DolFile, instructions: &[Instruction]) {
let mut file = File::create("../../cheat.txt")
.expect("Couldn't create \"cheat.txt\". You might need to provide higher \
privileges.");
writeln!(file, "A8000000 00000001").unwrap();
for instruction in instructions {
writeln!(file,
"{:08X} {:08X}",
(instruction.address & 0x01FFFFFF) | 0x04000000,
instruction.data)
.unwrap();
}
for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
writeln!(file,
"{:08X} {:08X}",
(section.address & 0x01FFFFFF) | 0x06000000,
section.data.len())
.unwrap();
let line_ender = if section.data.len() % 8 > 0 {
8 - (section.data.len() % 8)
} else {
0
};
for (i, byte) in section.data
.iter()
.chain(std::iter::repeat(&0).take(line_ender))
.enumerate() {
if i % 8 == 4 {
write!(file, " ").unwrap();
}
write!(file, "{:02X}", byte).unwrap();
if i % 8 == 7 {
writeln!(file, "").unwrap();
}
}
}
// for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
// let mut address = section.address;
// let line_ender = if section.data.len() % 4 > 0 {
// 4 - (section.data.len() % 4)
// } else {
// 0
// };
// for (i, byte) in section.data.iter().chain(std::iter::repeat(&0).take(line_ender)).enumerate() {
// if i % 4 == 0 {
// write!(file, "{:08X} ", (address & 0x01FFFFFF) | 0x04000000).unwrap();
// }
// write!(file, "{:02X}", byte).unwrap();
// if i % 4 == 3 {
// writeln!(file, "").unwrap();
// }
// address += 1;
// }
// }
}
|
extern crate regex;
|
random_line_split
|
|
main.rs
|
extern crate regex;
extern crate rustc_demangle;
mod dol;
mod assembler;
use std::fs::File;
use std::io::{BufWriter, BufReader};
use std::io::prelude::*;
use assembler::Assembler;
use regex::Regex;
use rustc_demangle::demangle;
use dol::DolFile;
use assembler::Instruction;
use std::env::args;
const FRAMEWORK_MAP: &'static str = include_str!("../framework.map");
const HEADER: &'static str = r".text section layout
Starting Virtual
address Size address
-----------------------";
fn create_framework_map() {
let regex = Regex::new(r".text.(.+)\s*\n*\s*0x(\w+)\s*\n*\s*0x(\w+)\s*\n*\s*.+\((.+)\)")
.unwrap();
let end_removal = Regex::new(r"^(.+E)\.?\d*$").unwrap();
let mut file = BufReader::new(File::open("../../target/intermediate.elf.map").unwrap());
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let mut file = BufWriter::new(File::create("../../target/framework.map").unwrap());
writeln!(file, "{}", HEADER).unwrap();
for captures in regex.captures_iter(&content) {
let mangled = captures.at(1).unwrap().trim();
let mangled = end_removal.captures(mangled).map_or(mangled, |c| c.at(1).unwrap());
let fn_name = demangle(mangled).to_string();
let address = captures.at(2).unwrap();
let length = captures.at(3).unwrap();
let source_file = captures.at(4).unwrap();
let length = u32::from_str_radix(length, 16).unwrap();
let mut fn_name: &str = &fn_name.replace(' ', "_")
.replace("()", "Void")
.replace("(", "Tuple<")
.replace(")", ">");
let fn_name_bytes = fn_name.as_bytes();
if fn_name.len() >= 19 && &fn_name_bytes[fn_name.len() - 19..][..3] == b"::h" {
fn_name = &fn_name[..fn_name.len() - 19];
}
        if address != "00000000" {
writeln!(file,
" 00000000 {:06x} {} 4 {} \t{}",
length,
address,
fn_name,
source_file)
.unwrap();
}
}
write!(file, "{}", FRAMEWORK_MAP).unwrap();
}
fn main() {
let mut asm = String::new();
let _ = File::open("../../src/patch.asm")
.expect("Couldn't find \"src/patch.asm\". If you don't need to patch the dol, just \
create an empty file.")
.read_to_string(&mut asm);
let lines = &asm.lines().collect::<Vec<_>>();
let mut assembler = Assembler::new("../../target/intermediate.elf");
let instructions = &assembler.assemble_all_lines(lines);
let mut intermediate = Vec::new();
let _ = File::open("../../target/intermediate.dol")
.expect("Couldn't find \"target/intermediate.dol\". Did you build the project correctly \
using \"make\"?")
.read_to_end(&mut intermediate);
let intermediate = DolFile::new(&intermediate);
if let Some("cheat") = args().skip(1).next().as_ref().map(|x| x as &str) {
write_cheat(intermediate, instructions);
} else {
let mut original = Vec::new();
let _ = File::open("../../game/original.dol")
.expect("Couldn't find \"game/original.dol\". You need to copy the game's main.dol \
there.")
.read_to_end(&mut original);
let original = DolFile::new(&original);
patch_game(original, intermediate, instructions);
}
}
fn patch_game(original: DolFile, intermediate: DolFile, instructions: &[Instruction]) {
let mut original = original;
original.append(intermediate);
original.patch(instructions);
let data = original.to_bytes();
let mut file = File::create("../../game/sys/main.dol")
.expect("Couldn't create \"game/sys/main.dol\". You might need to provide higher \
privileges.");
    file.write_all(&data).expect("Couldn't write the main.dol");
create_framework_map();
}
fn write_cheat(intermediate: DolFile, instructions: &[Instruction])
|
.unwrap();
let line_ender = if section.data.len() % 8 > 0 {
8 - (section.data.len() % 8)
} else {
0
};
for (i, byte) in section.data
.iter()
.chain(std::iter::repeat(&0).take(line_ender))
.enumerate() {
if i % 8 == 4 {
write!(file, " ").unwrap();
}
write!(file, "{:02X}", byte).unwrap();
if i % 8 == 7 {
writeln!(file, "").unwrap();
}
}
}
// for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
// let mut address = section.address;
// let line_ender = if section.data.len() % 4 > 0 {
// 4 - (section.data.len() % 4)
// } else {
// 0
// };
// for (i, byte) in section.data.iter().chain(std::iter::repeat(&0).take(line_ender)).enumerate() {
// if i % 4 == 0 {
// write!(file, "{:08X} ", (address & 0x01FFFFFF) | 0x04000000).unwrap();
// }
// write!(file, "{:02X}", byte).unwrap();
// if i % 4 == 3 {
// writeln!(file, "").unwrap();
// }
// address += 1;
// }
// }
}
|
{
let mut file = File::create("../../cheat.txt")
.expect("Couldn't create \"cheat.txt\". You might need to provide higher \
privileges.");
writeln!(file, "A8000000 00000001").unwrap();
for instruction in instructions {
writeln!(file,
"{:08X} {:08X}",
(instruction.address & 0x01FFFFFF) | 0x04000000,
instruction.data)
.unwrap();
}
for section in intermediate.text_sections.iter().chain(intermediate.data_sections.iter()) {
writeln!(file,
"{:08X} {:08X}",
(section.address & 0x01FFFFFF) | 0x06000000,
section.data.len())
|
identifier_body
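A small usage sketch of the demangling step that create_framework_map relies on, assuming the same rustc-demangle crate as above; the mangled symbol is an invented example.

extern crate rustc_demangle;

use rustc_demangle::demangle;

fn main() {
    // Legacy Rust mangling: _ZN <len><element>... 17h<16 hex digits> E
    let mangled = "_ZN4core3fmt5write17h0123456789abcdefE";
    // `demangle` returns a wrapper implementing Display; the readable name
    // keeps the trailing `::h...` hash, which the map generator above strips
    // via its 19-byte "::h" suffix check.
    println!("{}", demangle(mangled).to_string());
}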
|
dropck-eyepatch-extern-crate.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
extern crate dropck_eyepatch_extern_crate as other;
use other::{Dt,Dr,Pt,Pr,St,Sr};
fn main() {
use std::cell::RefCell;
struct CheckOnDrop(RefCell<String>, &'static str);
impl Drop for CheckOnDrop {
fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
}
let c_long;
let (c, dt, dr, pt, pr, st, sr)
: (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
"c_long|pr|pt|dr|dt");
c = CheckOnDrop(RefCell::new("c".to_string()),
"c");
// No error: sufficiently long-lived state can be referenced in dtors
dt = Dt("dt", &c_long.0);
dr = Dr("dr", &c_long.0);
    // No error: Drop impl asserts .1 (A and &'a _) are not accessed
pt = Pt("pt", &c.0, &c_long.0);
pr = Pr("pr", &c.0, &c_long.0);
// No error: St and Sr have no destructor.
st = St("st", &c.0);
sr = Sr("sr", &c.0);
println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
assert_eq!(*c_long.0.borrow(), "c_long");
assert_eq!(*c.0.borrow(), "c");
}
|
// except according to those terms.
// run-pass
// aux-build:dropck_eyepatch_extern_crate.rs
|
random_line_split
|
dropck-eyepatch-extern-crate.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:dropck_eyepatch_extern_crate.rs
extern crate dropck_eyepatch_extern_crate as other;
use other::{Dt,Dr,Pt,Pr,St,Sr};
fn main() {
use std::cell::RefCell;
struct CheckOnDrop(RefCell<String>, &'static str);
impl Drop for CheckOnDrop {
fn drop(&mut self)
|
}
let c_long;
let (c, dt, dr, pt, pr, st, sr)
: (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
"c_long|pr|pt|dr|dt");
c = CheckOnDrop(RefCell::new("c".to_string()),
"c");
// No error: sufficiently long-lived state can be referenced in dtors
dt = Dt("dt", &c_long.0);
dr = Dr("dr", &c_long.0);
    // No error: Drop impl asserts .1 (A and &'a _) are not accessed
pt = Pt("pt", &c.0, &c_long.0);
pr = Pr("pr", &c.0, &c_long.0);
// No error: St and Sr have no destructor.
st = St("st", &c.0);
sr = Sr("sr", &c.0);
println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
assert_eq!(*c_long.0.borrow(), "c_long");
assert_eq!(*c.0.borrow(), "c");
}
|
{ assert_eq!(*self.0.borrow(), self.1); }
|
identifier_body
|
dropck-eyepatch-extern-crate.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:dropck_eyepatch_extern_crate.rs
extern crate dropck_eyepatch_extern_crate as other;
use other::{Dt,Dr,Pt,Pr,St,Sr};
fn main() {
use std::cell::RefCell;
struct CheckOnDrop(RefCell<String>, &'static str);
impl Drop for CheckOnDrop {
fn
|
(&mut self) { assert_eq!(*self.0.borrow(), self.1); }
}
let c_long;
let (c, dt, dr, pt, pr, st, sr)
: (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>);
c_long = CheckOnDrop(RefCell::new("c_long".to_string()),
"c_long|pr|pt|dr|dt");
c = CheckOnDrop(RefCell::new("c".to_string()),
"c");
// No error: sufficiently long-lived state can be referenced in dtors
dt = Dt("dt", &c_long.0);
dr = Dr("dr", &c_long.0);
    // No error: Drop impl asserts .1 (A and &'a _) are not accessed
pt = Pt("pt", &c.0, &c_long.0);
pr = Pr("pr", &c.0, &c_long.0);
// No error: St and Sr have no destructor.
st = St("st", &c.0);
sr = Sr("sr", &c.0);
println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
assert_eq!(*c_long.0.borrow(), "c_long");
assert_eq!(*c.0.borrow(), "c");
}
|
drop
|
identifier_name
|
method-ambig-one-trait-unknown-int-type.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that invoking `foo()` successfully resolves to the trait `foo`
// (prompting the mismatched types error) but does not influence the choice
// of what kind of `Vec` we have, eventually leading to a type error.
trait foo {
fn foo(&self) -> isize;
}
impl foo for Vec<usize> {
fn foo(&self) -> isize {1}
}
impl foo for Vec<isize> {
fn
|
(&self) -> isize {2}
}
// This is very hokey: we have heuristics to suppress messages about
// type annotations required. But placing these two bits of code into
// distinct functions, in this order, causes us to print out both
// errors I'd like to see.
fn m1() {
// we couldn't infer the type of the vector just based on calling foo()...
let mut x = Vec::new();
//~^ ERROR type annotations or generic parameter binding required
x.foo();
}
fn m2() {
let mut x = Vec::new();
    // ...but we still resolved `foo()` to the trait and hence know the return type.
let y: usize = x.foo(); //~ ERROR mismatched types
}
fn main() { }
|
foo
|
identifier_name
|
method-ambig-one-trait-unknown-int-type.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that invoking `foo()` successfully resolves to the trait `foo`
// (prompting the mismatched types error) but does not influence the choice
// of what kind of `Vec` we have, eventually leading to a type error.
trait foo {
fn foo(&self) -> isize;
}
impl foo for Vec<usize> {
fn foo(&self) -> isize
|
}
impl foo for Vec<isize> {
fn foo(&self) -> isize {2}
}
// This is very hokey: we have heuristics to suppress messages about
// type annotations required. But placing these two bits of code into
// distinct functions, in this order, causes us to print out both
// errors I'd like to see.
fn m1() {
// we couldn't infer the type of the vector just based on calling foo()...
let mut x = Vec::new();
//~^ ERROR type annotations or generic parameter binding required
x.foo();
}
fn m2() {
let mut x = Vec::new();
    // ...but we still resolved `foo()` to the trait and hence know the return type.
let y: usize = x.foo(); //~ ERROR mismatched types
}
fn main() { }
|
{1}
|
identifier_body
|
method-ambig-one-trait-unknown-int-type.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that invoking `foo()` successfully resolves to the trait `foo`
// (prompting the mismatched types error) but does not influence the choice
// of what kind of `Vec` we have, eventually leading to a type error.
trait foo {
fn foo(&self) -> isize;
}
impl foo for Vec<usize> {
fn foo(&self) -> isize {1}
}
|
// This is very hokey: we have heuristics to suppress messages about
// type annotations required. But placing these two bits of code into
// distinct functions, in this order, causes us to print out both
// errors I'd like to see.
fn m1() {
// we couldn't infer the type of the vector just based on calling foo()...
let mut x = Vec::new();
//~^ ERROR type annotations or generic parameter binding required
x.foo();
}
fn m2() {
let mut x = Vec::new();
    // ...but we still resolved `foo()` to the trait and hence know the return type.
let y: usize = x.foo(); //~ ERROR mismatched types
}
fn main() { }
|
impl foo for Vec<isize> {
fn foo(&self) -> isize {2}
}
|
random_line_split
|
small-enum-range-edge.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
// this is for the wrapping_add call below.
#![feature(core)]
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
#[repr(u8)]
#[derive(Copy, Clone)]
enum Eu { Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy, Clone)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() {
assert_eq!((Eu::Hu as u8).wrapping_add(1), Eu::Lu as u8);
assert_eq!((Es::Hs as i8).wrapping_add(1), Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
}
|
// except according to those terms.
|
random_line_split
|
small-enum-range-edge.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// this is for the wrapping_add call below.
#![feature(core)]
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
#[repr(u8)]
#[derive(Copy, Clone)]
enum
|
{ Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy, Clone)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() {
assert_eq!((Eu::Hu as u8).wrapping_add(1), Eu::Lu as u8);
assert_eq!((Es::Hs as i8).wrapping_add(1), Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
}
|
Eu
|
identifier_name
|
small-enum-range-edge.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// this is for the wrapping_add call below.
#![feature(core)]
/*!
* Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
#[repr(u8)]
#[derive(Copy, Clone)]
enum Eu { Lu = 0, Hu = 255 }
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
#[repr(i8)]
#[derive(Copy, Clone)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main()
|
{
assert_eq!((Eu::Hu as u8).wrapping_add(1), Eu::Lu as u8);
assert_eq!((Es::Hs as i8).wrapping_add(1), Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
}
|
identifier_body
|
|
hello_canvas.rs
|
//! Basic hello world example, drawing
//! to a canvas.
use ggez::event;
use ggez::graphics::{self, Color};
use ggez::{Context, GameResult};
use glam::*;
use std::env;
use std::path;
struct MainState {
text: graphics::Text,
canvas: graphics::Canvas,
frames: usize,
draw_with_canvas: bool,
}
impl MainState {
fn new(ctx: &mut Context) -> GameResult<MainState> {
// The ttf file will be in your resources directory. Later, we
// will mount that directory so we can omit it in the path here.
let font = graphics::Font::new(ctx, "/LiberationMono-Regular.ttf")?;
let text = graphics::Text::new(("Hello world!", font, 48.0));
let canvas = graphics::Canvas::with_window_size(ctx)?;
let s = MainState {
text,
canvas,
draw_with_canvas: false,
frames: 0,
};
Ok(s)
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, _ctx: &mut Context) -> GameResult {
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
let dest_point = Vec2::new(10.0, 10.0);
if self.draw_with_canvas {
println!("Drawing with canvas");
graphics::clear(ctx, graphics::Color::from((64, 0, 0, 0)));
graphics::set_canvas(ctx, Some(&self.canvas));
graphics::clear(ctx, graphics::Color::from((255, 255, 255, 128)));
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((0, 0, 0, 255))),
)?;
graphics::set_canvas(ctx, None);
graphics::draw(
ctx,
&self.canvas,
graphics::DrawParam::new().color(Color::from((255, 255, 255, 128))),
)?;
} else {
println!("Drawing without canvas");
graphics::set_canvas(ctx, None);
graphics::clear(ctx, [0.25, 0.0, 0.0, 1.0].into());
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((192, 128, 64, 255))),
)?;
}
graphics::present(ctx)?;
self.frames += 1;
if (self.frames % 100) == 0 {
println!("FPS: {}", ggez::timer::fps(ctx));
}
Ok(())
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
_keycode: ggez::event::KeyCode,
_keymod: ggez::event::KeyMods,
repeat: bool,
) {
        if !repeat {
            self.draw_with_canvas = !self.draw_with_canvas;
println!("Canvas on: {}", self.draw_with_canvas);
}
}
}
pub fn main() -> GameResult {
let resource_dir = if let Ok(manifest_dir) = env::var("CARGO_MANIFEST_DIR")
|
else {
path::PathBuf::from("./resources")
};
let cb = ggez::ContextBuilder::new("hello_canvas", "ggez").add_resource_path(resource_dir);
let (mut ctx, event_loop) = cb.build()?;
let state = MainState::new(&mut ctx)?;
event::run(ctx, event_loop, state)
}
|
{
let mut path = path::PathBuf::from(manifest_dir);
path.push("resources");
path
}
|
conditional_block
|
hello_canvas.rs
|
//! Basic hello world example, drawing
//! to a canvas.
use ggez::event;
use ggez::graphics::{self, Color};
use ggez::{Context, GameResult};
use glam::*;
use std::env;
use std::path;
struct MainState {
text: graphics::Text,
canvas: graphics::Canvas,
frames: usize,
draw_with_canvas: bool,
}
impl MainState {
fn new(ctx: &mut Context) -> GameResult<MainState> {
// The ttf file will be in your resources directory. Later, we
// will mount that directory so we can omit it in the path here.
let font = graphics::Font::new(ctx, "/LiberationMono-Regular.ttf")?;
let text = graphics::Text::new(("Hello world!", font, 48.0));
let canvas = graphics::Canvas::with_window_size(ctx)?;
let s = MainState {
text,
canvas,
draw_with_canvas: false,
frames: 0,
};
Ok(s)
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, _ctx: &mut Context) -> GameResult
|
fn draw(&mut self, ctx: &mut Context) -> GameResult {
let dest_point = Vec2::new(10.0, 10.0);
if self.draw_with_canvas {
println!("Drawing with canvas");
graphics::clear(ctx, graphics::Color::from((64, 0, 0, 0)));
graphics::set_canvas(ctx, Some(&self.canvas));
graphics::clear(ctx, graphics::Color::from((255, 255, 255, 128)));
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((0, 0, 0, 255))),
)?;
graphics::set_canvas(ctx, None);
graphics::draw(
ctx,
&self.canvas,
graphics::DrawParam::new().color(Color::from((255, 255, 255, 128))),
)?;
} else {
println!("Drawing without canvas");
graphics::set_canvas(ctx, None);
graphics::clear(ctx, [0.25, 0.0, 0.0, 1.0].into());
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((192, 128, 64, 255))),
)?;
}
graphics::present(ctx)?;
self.frames += 1;
if (self.frames % 100) == 0 {
println!("FPS: {}", ggez::timer::fps(ctx));
}
Ok(())
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
_keycode: ggez::event::KeyCode,
_keymod: ggez::event::KeyMods,
repeat: bool,
) {
        if !repeat {
            self.draw_with_canvas = !self.draw_with_canvas;
println!("Canvas on: {}", self.draw_with_canvas);
}
}
}
pub fn main() -> GameResult {
let resource_dir = if let Ok(manifest_dir) = env::var("CARGO_MANIFEST_DIR") {
let mut path = path::PathBuf::from(manifest_dir);
path.push("resources");
path
} else {
path::PathBuf::from("./resources")
};
let cb = ggez::ContextBuilder::new("hello_canvas", "ggez").add_resource_path(resource_dir);
let (mut ctx, event_loop) = cb.build()?;
let state = MainState::new(&mut ctx)?;
event::run(ctx, event_loop, state)
}
|
{
Ok(())
}
|
identifier_body
|
hello_canvas.rs
|
//! Basic hello world example, drawing
//! to a canvas.
use ggez::event;
use ggez::graphics::{self, Color};
use ggez::{Context, GameResult};
use glam::*;
use std::env;
use std::path;
struct MainState {
text: graphics::Text,
canvas: graphics::Canvas,
frames: usize,
draw_with_canvas: bool,
}
impl MainState {
fn new(ctx: &mut Context) -> GameResult<MainState> {
// The ttf file will be in your resources directory. Later, we
// will mount that directory so we can omit it in the path here.
let font = graphics::Font::new(ctx, "/LiberationMono-Regular.ttf")?;
let text = graphics::Text::new(("Hello world!", font, 48.0));
let canvas = graphics::Canvas::with_window_size(ctx)?;
let s = MainState {
text,
canvas,
draw_with_canvas: false,
frames: 0,
};
Ok(s)
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, _ctx: &mut Context) -> GameResult {
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
let dest_point = Vec2::new(10.0, 10.0);
if self.draw_with_canvas {
println!("Drawing with canvas");
graphics::clear(ctx, graphics::Color::from((64, 0, 0, 0)));
graphics::set_canvas(ctx, Some(&self.canvas));
graphics::clear(ctx, graphics::Color::from((255, 255, 255, 128)));
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((0, 0, 0, 255))),
)?;
graphics::set_canvas(ctx, None);
graphics::draw(
ctx,
&self.canvas,
graphics::DrawParam::new().color(Color::from((255, 255, 255, 128))),
)?;
} else {
println!("Drawing without canvas");
graphics::set_canvas(ctx, None);
graphics::clear(ctx, [0.25, 0.0, 0.0, 1.0].into());
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((192, 128, 64, 255))),
)?;
}
graphics::present(ctx)?;
self.frames += 1;
if (self.frames % 100) == 0 {
println!("FPS: {}", ggez::timer::fps(ctx));
}
Ok(())
}
fn
|
(
&mut self,
_ctx: &mut Context,
_keycode: ggez::event::KeyCode,
_keymod: ggez::event::KeyMods,
repeat: bool,
) {
        if !repeat {
            self.draw_with_canvas = !self.draw_with_canvas;
println!("Canvas on: {}", self.draw_with_canvas);
}
}
}
pub fn main() -> GameResult {
let resource_dir = if let Ok(manifest_dir) = env::var("CARGO_MANIFEST_DIR") {
let mut path = path::PathBuf::from(manifest_dir);
path.push("resources");
path
} else {
path::PathBuf::from("./resources")
};
let cb = ggez::ContextBuilder::new("hello_canvas", "ggez").add_resource_path(resource_dir);
let (mut ctx, event_loop) = cb.build()?;
let state = MainState::new(&mut ctx)?;
event::run(ctx, event_loop, state)
}
|
key_down_event
|
identifier_name
|
hello_canvas.rs
|
//! Basic hello world example, drawing
//! to a canvas.
use ggez::event;
use ggez::graphics::{self, Color};
use ggez::{Context, GameResult};
use glam::*;
use std::env;
use std::path;
struct MainState {
text: graphics::Text,
canvas: graphics::Canvas,
frames: usize,
draw_with_canvas: bool,
}
impl MainState {
fn new(ctx: &mut Context) -> GameResult<MainState> {
// The ttf file will be in your resources directory. Later, we
// will mount that directory so we can omit it in the path here.
let font = graphics::Font::new(ctx, "/LiberationMono-Regular.ttf")?;
let text = graphics::Text::new(("Hello world!", font, 48.0));
let canvas = graphics::Canvas::with_window_size(ctx)?;
let s = MainState {
text,
canvas,
draw_with_canvas: false,
frames: 0,
};
Ok(s)
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, _ctx: &mut Context) -> GameResult {
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
let dest_point = Vec2::new(10.0, 10.0);
if self.draw_with_canvas {
println!("Drawing with canvas");
graphics::clear(ctx, graphics::Color::from((64, 0, 0, 0)));
graphics::set_canvas(ctx, Some(&self.canvas));
graphics::clear(ctx, graphics::Color::from((255, 255, 255, 128)));
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((0, 0, 0, 255))),
)?;
graphics::set_canvas(ctx, None);
graphics::draw(
ctx,
&self.canvas,
graphics::DrawParam::new().color(Color::from((255, 255, 255, 128))),
)?;
} else {
println!("Drawing without canvas");
graphics::set_canvas(ctx, None);
graphics::clear(ctx, [0.25, 0.0, 0.0, 1.0].into());
graphics::draw(
ctx,
&self.text,
graphics::DrawParam::new()
.dest(dest_point)
.color(Color::from((192, 128, 64, 255))),
)?;
}
graphics::present(ctx)?;
self.frames += 1;
if (self.frames % 100) == 0 {
println!("FPS: {}", ggez::timer::fps(ctx));
}
Ok(())
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
_keycode: ggez::event::KeyCode,
_keymod: ggez::event::KeyMods,
|
println!("Canvas on: {}", self.draw_with_canvas);
}
}
}
pub fn main() -> GameResult {
let resource_dir = if let Ok(manifest_dir) = env::var("CARGO_MANIFEST_DIR") {
let mut path = path::PathBuf::from(manifest_dir);
path.push("resources");
path
} else {
path::PathBuf::from("./resources")
};
let cb = ggez::ContextBuilder::new("hello_canvas", "ggez").add_resource_path(resource_dir);
let (mut ctx, event_loop) = cb.build()?;
let state = MainState::new(&mut ctx)?;
event::run(ctx, event_loop, state)
}
|
repeat: bool,
) {
if !repeat {
self.draw_with_canvas = !self.draw_with_canvas;
|
random_line_split
|
relative-mouse-state.rs
|
extern crate sdl2;
use sdl2::event::Event;
use sdl2::mouse::MouseButton;
use std::time::Duration;
pub fn
|
() {
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let _window = video_subsystem.window("Mouse", 800, 600)
.position_centered()
.build()
.unwrap();
let mut events = sdl_context.event_pump().unwrap();
let mut state;
'running: loop {
for event in events.poll_iter() {
match event {
Event::Quit {..} => break 'running,
_ => ()
}
}
// get a mouse state using mouse_state() so as not to call
// relative_mouse_state() twice and get a false position reading
if events.mouse_state().is_mouse_button_pressed(MouseButton::Left) {
state = events.relative_mouse_state();
println!("Relative - X = {:?}, Y = {:?}", state.x(), state.y());
}
std::thread::sleep(Duration::from_millis(100));
}
}
|
main
|
identifier_name
|
relative-mouse-state.rs
|
extern crate sdl2;
use sdl2::event::Event;
use sdl2::mouse::MouseButton;
use std::time::Duration;
pub fn main() {
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let _window = video_subsystem.window("Mouse", 800, 600)
.position_centered()
.build()
.unwrap();
let mut events = sdl_context.event_pump().unwrap();
let mut state;
'running: loop {
for event in events.poll_iter() {
match event {
Event::Quit {..} => break 'running,
_ => ()
}
}
// get a mouse state using mouse_state() so as not to call
// relative_mouse_state() twice and get a false position reading
if events.mouse_state().is_mouse_button_pressed(MouseButton::Left) {
state = events.relative_mouse_state();
println!("Relative - X = {:?}, Y = {:?}", state.x(), state.y());
}
std::thread::sleep(Duration::from_millis(100));
|
}
|
}
|
random_line_split
|
relative-mouse-state.rs
|
extern crate sdl2;
use sdl2::event::Event;
use sdl2::mouse::MouseButton;
use std::time::Duration;
pub fn main()
|
// get a mouse state using mouse_state() so as not to call
// relative_mouse_state() twice and get a false position reading
if events.mouse_state().is_mouse_button_pressed(MouseButton::Left) {
state = events.relative_mouse_state();
println!("Relative - X = {:?}, Y = {:?}", state.x(), state.y());
}
std::thread::sleep(Duration::from_millis(100));
}
}
|
{
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let _window = video_subsystem.window("Mouse", 800, 600)
.position_centered()
.build()
.unwrap();
let mut events = sdl_context.event_pump().unwrap();
let mut state;
'running: loop {
for event in events.poll_iter() {
match event {
Event::Quit {..} => break 'running,
_ => ()
}
}
|
identifier_body
|
main.rs
|
use std::collections::HashMap;
fn main() {
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let different_scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
    // The hash map takes ownership of the values inserted into it
let team_name = String::from("Blue");
let score = different_scores.get(&team_name);
for (key, value) in &different_scores {
println!("{}: {}", key, value);
}
// Insert overwrites things by default
scores.entry(String::from("Blue")).or_insert(100);
scores.entry(String::from("Red")).or_insert(200);
println!("{:?}", scores);
|
for word in text.split_whitespace() {
        let count = map.entry(word).or_insert(0);
        *count += 1;
}
println!("{:?}", map);
    // It's possible to use a different hasher than the default one
}
|
let text = "hello wonderful world world";
let mut map = HashMap::new();
|
random_line_split
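The comment above notes that a hasher other than the default one can be used. A minimal sketch using only the standard library; DefaultHasher is just a stand-in build-hasher here, and in practice you would plug in a third-party hasher through the same BuildHasher trait.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;

fn main() {
    // The third type parameter of HashMap selects the hashing state.
    let mut scores: HashMap<String, i32, BuildHasherDefault<DefaultHasher>> =
        HashMap::default();
    scores.insert(String::from("Blue"), 10);
    assert_eq!(scores["Blue"], 10);
}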
|
main.rs
|
use std::collections::HashMap;
fn main()
|
scores.entry(String::from("Blue")).or_insert(100);
scores.entry(String::from("Red")).or_insert(200);
println!("{:?}", scores);
let text = "hello wonderful world world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
        let count = map.entry(word).or_insert(0);
        *count += 1;
}
println!("{:?}", map);
    // It's possible to use a different hasher than the default one
}
|
{
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let different_scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
    // The hash map takes ownership of the values inserted into it
let team_name = String::from("Blue");
let score = different_scores.get(&team_name);
for (key, value) in &different_scores {
println!("{}: {}", key, value);
}
// Insert overwrites things by default
|
identifier_body
|
main.rs
|
use std::collections::HashMap;
fn
|
() {
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let different_scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
    // The hash map takes ownership of the values inserted into it
let team_name = String::from("Blue");
let score = different_scores.get(&team_name);
for (key, value) in &different_scores {
println!("{}: {}", key, value);
}
// Insert overwrites things by default
scores.entry(String::from("Blue")).or_insert(100);
scores.entry(String::from("Red")).or_insert(200);
println!("{:?}", scores);
let text = "hello wonderful world world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
        let count = map.entry(word).or_insert(0);
        *count += 1;
}
println!("{:?}", map);
    // It's possible to use a different hasher than the default one
}
|
main
|
identifier_name
|
host.rs
|
use std::convert::TryFrom;
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::str::FromStr;
use super::EndpointError;
use crate::ZmqError;
/// Represents a host address. Does not include the port, and may be either an
/// ip address or a domain name
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum Host {
/// An IPv4 address
Ipv4(Ipv4Addr),
/// An Ipv6 address
Ipv6(Ipv6Addr),
/// A domain name, such as `example.com` in `tcp://example.com:4567`.
Domain(String),
}
impl fmt::Display for Host {
fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
match self {
Host::Ipv4(addr) => write!(f, "{}", addr),
Host::Ipv6(addr) => write!(f, "{}", addr),
Host::Domain(name) => write!(f, "{}", name),
}
}
}
impl TryFrom<Host> for IpAddr {
type Error = ZmqError;
fn try_from(h: Host) -> Result<Self, Self::Error> {
match h {
Host::Ipv4(a) => Ok(IpAddr::V4(a)),
Host::Ipv6(a) => Ok(IpAddr::V6(a)),
Host::Domain(_) => Err(ZmqError::Other("Host was neither Ipv4 nor Ipv6")),
}
}
}
impl From<IpAddr> for Host {
fn from(a: IpAddr) -> Self {
match a {
IpAddr::V4(a) => Host::Ipv4(a),
IpAddr::V6(a) => Host::Ipv6(a),
}
}
}
impl TryFrom<String> for Host {
type Error = EndpointError;
/// An Ipv6 address must be enclosed by `[` and `]`.
fn
|
(s: String) -> Result<Self, Self::Error> {
if s.is_empty() {
return Err(EndpointError::Syntax("Host string should not be empty"));
}
if let Ok(addr) = s.parse::<Ipv4Addr>() {
return Ok(Host::Ipv4(addr));
}
// Attempt to parse ipv6 from either ::1 or [::1] using ascii
let ipv6_substr =
if s.starts_with('[') && s.len() >= 4 && *s.as_bytes().last().unwrap() == b']' {
let substr = &s[1..s.len() - 1];
debug_assert_eq!(substr.len(), s.len() - 2);
substr
} else {
&s
};
if let Ok(addr) = ipv6_substr.parse::<Ipv6Addr>() {
return Ok(Host::Ipv6(addr));
}
Ok(Host::Domain(s))
}
}
impl FromStr for Host {
type Err = EndpointError;
/// Equivalent to [`TryFrom<String>`]
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.to_string();
Self::try_from(s)
}
}
#[cfg(test)]
mod tests {
use super::*;
// These two tests on std are more for reference than any real test of
// functionality
#[test]
fn std_ipv6_parse() {
assert_eq!(Ipv6Addr::LOCALHOST, "::1".parse::<Ipv6Addr>().unwrap());
assert!("[::1]".parse::<Ipv6Addr>().is_err());
}
#[test]
fn std_ipv6_display() {
assert_eq!("::1", &Ipv6Addr::LOCALHOST.to_string());
}
#[test]
fn parse_and_display_nobracket_ipv6_same_as_std() {
let valid_addr_strs = vec![
"::1",
"::",
"2001:db8:a::123",
"2001:db8:0:0:0:0:2:1",
"2001:db8::2:1",
];
let invalid_addr_strs = vec!["", "[]", "[:]", ":"];
for valid in valid_addr_strs {
let parsed_std = valid.parse::<Ipv6Addr>().unwrap();
let parsed_host = valid.parse::<Host>().unwrap();
if let Host::Ipv6(parsed_host) = &parsed_host {
// Check that both are structurally the same
assert_eq!(&parsed_std, parsed_host);
} else {
panic!("Did not parse as IPV6!");
}
// Check that both display as the same
assert_eq!(parsed_std.to_string(), parsed_host.to_string());
}
for invalid in invalid_addr_strs {
invalid.parse::<Ipv6Addr>().unwrap_err();
let parsed_host = invalid.parse::<Host>();
if parsed_host.is_err() {
continue;
}
let parsed_host = parsed_host.unwrap();
if let Host::Domain(_) = parsed_host {
continue;
}
panic!(
"Expected that \"{}\" would not parse as Ipv6 or Ipv4, but instead it parsed as {:?}",
invalid, parsed_host
);
}
}
#[test]
fn parse_and_display_bracket_ipv6() {
let addr_strs = vec![
"[::1]",
"[::]",
"[2001:db8:a::123]",
"[2001:db8:0:0:0:0:2:1]",
"[2001:db8::2:1]",
];
fn remove_brackets(s: &str) -> &str {
assert!(s.starts_with('['));
assert!(s.ends_with(']'));
let result = &s[1..s.len() - 1];
assert_eq!(result.len(), s.len() - 2);
result
}
for addr_str in addr_strs {
let parsed_host: Host = addr_str.parse().unwrap();
assert!(addr_str.parse::<Ipv6Addr>().is_err());
if let Host::Ipv6(host_ipv6) = parsed_host {
assert_eq!(
host_ipv6,
remove_brackets(addr_str).parse::<Ipv6Addr>().unwrap()
);
assert_eq!(parsed_host.to_string(), host_ipv6.to_string());
} else {
panic!(
"Expected host to parse as Ipv6, but instead got {:?}",
parsed_host
);
}
}
}
}
|
try_from
|
identifier_name
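A hypothetical usage sketch of the Host parser defined above; it assumes `Host` and `EndpointError` are in scope from the same crate and shows that the bracketed form is accepted for IPv6.

use std::convert::TryFrom;

fn demo() -> Result<(), EndpointError> {
    let v4 = Host::try_from(String::from("127.0.0.1"))?; // Host::Ipv4
    let v6: Host = "[::1]".parse()?;                      // brackets stripped, Host::Ipv6
    let name: Host = "example.com".parse()?;              // falls through to Host::Domain
    println!("{} {} {}", v4, v6, name);
    Ok(())
}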
|
host.rs
|
use std::convert::TryFrom;
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::str::FromStr;
use super::EndpointError;
use crate::ZmqError;
/// Represents a host address. Does not include the port, and may be either an
/// ip address or a domain name
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum Host {
/// An IPv4 address
Ipv4(Ipv4Addr),
/// An Ipv6 address
Ipv6(Ipv6Addr),
/// A domain name, such as `example.com` in `tcp://example.com:4567`.
Domain(String),
}
impl fmt::Display for Host {
fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
match self {
Host::Ipv4(addr) => write!(f, "{}", addr),
Host::Ipv6(addr) => write!(f, "{}", addr),
Host::Domain(name) => write!(f, "{}", name),
}
}
}
impl TryFrom<Host> for IpAddr {
type Error = ZmqError;
fn try_from(h: Host) -> Result<Self, Self::Error> {
match h {
Host::Ipv4(a) => Ok(IpAddr::V4(a)),
Host::Ipv6(a) => Ok(IpAddr::V6(a)),
Host::Domain(_) => Err(ZmqError::Other("Host was neither Ipv4 nor Ipv6")),
}
}
}
impl From<IpAddr> for Host {
fn from(a: IpAddr) -> Self {
match a {
IpAddr::V4(a) => Host::Ipv4(a),
IpAddr::V6(a) => Host::Ipv6(a),
}
}
}
impl TryFrom<String> for Host {
type Error = EndpointError;
/// An Ipv6 address must be enclosed by `[` and `]`.
fn try_from(s: String) -> Result<Self, Self::Error> {
if s.is_empty() {
return Err(EndpointError::Syntax("Host string should not be empty"));
}
if let Ok(addr) = s.parse::<Ipv4Addr>() {
return Ok(Host::Ipv4(addr));
}
// Attempt to parse ipv6 from either ::1 or [::1] using ascii
let ipv6_substr =
if s.starts_with('[') && s.len() >= 4 && *s.as_bytes().last().unwrap() == b']' {
let substr = &s[1..s.len() - 1];
debug_assert_eq!(substr.len(), s.len() - 2);
substr
} else {
&s
};
if let Ok(addr) = ipv6_substr.parse::<Ipv6Addr>() {
return Ok(Host::Ipv6(addr));
}
Ok(Host::Domain(s))
}
}
impl FromStr for Host {
type Err = EndpointError;
/// Equivalent to [`TryFrom<String>`]
fn from_str(s: &str) -> Result<Self, Self::Err>
|
}
#[cfg(test)]
mod tests {
use super::*;
// These two tests on std are more for reference than any real test of
// functionality
#[test]
fn std_ipv6_parse() {
assert_eq!(Ipv6Addr::LOCALHOST, "::1".parse::<Ipv6Addr>().unwrap());
assert!("[::1]".parse::<Ipv6Addr>().is_err());
}
#[test]
fn std_ipv6_display() {
assert_eq!("::1", &Ipv6Addr::LOCALHOST.to_string());
}
#[test]
fn parse_and_display_nobracket_ipv6_same_as_std() {
let valid_addr_strs = vec![
"::1",
"::",
"2001:db8:a::123",
"2001:db8:0:0:0:0:2:1",
"2001:db8::2:1",
];
let invalid_addr_strs = vec!["", "[]", "[:]", ":"];
for valid in valid_addr_strs {
let parsed_std = valid.parse::<Ipv6Addr>().unwrap();
let parsed_host = valid.parse::<Host>().unwrap();
if let Host::Ipv6(parsed_host) = &parsed_host {
// Check that both are structurally the same
assert_eq!(&parsed_std, parsed_host);
} else {
panic!("Did not parse as IPV6!");
}
// Check that both display as the same
assert_eq!(parsed_std.to_string(), parsed_host.to_string());
}
for invalid in invalid_addr_strs {
invalid.parse::<Ipv6Addr>().unwrap_err();
let parsed_host = invalid.parse::<Host>();
if parsed_host.is_err() {
continue;
}
let parsed_host = parsed_host.unwrap();
if let Host::Domain(_) = parsed_host {
continue;
}
panic!(
"Expected that \"{}\" would not parse as Ipv6 or Ipv4, but instead it parsed as {:?}",
invalid, parsed_host
);
}
}
#[test]
fn parse_and_display_bracket_ipv6() {
let addr_strs = vec![
"[::1]",
"[::]",
"[2001:db8:a::123]",
"[2001:db8:0:0:0:0:2:1]",
"[2001:db8::2:1]",
];
fn remove_brackets(s: &str) -> &str {
assert!(s.starts_with('['));
assert!(s.ends_with(']'));
let result = &s[1..s.len() - 1];
assert_eq!(result.len(), s.len() - 2);
result
}
for addr_str in addr_strs {
let parsed_host: Host = addr_str.parse().unwrap();
assert!(addr_str.parse::<Ipv6Addr>().is_err());
if let Host::Ipv6(host_ipv6) = parsed_host {
assert_eq!(
host_ipv6,
remove_brackets(addr_str).parse::<Ipv6Addr>().unwrap()
);
assert_eq!(parsed_host.to_string(), host_ipv6.to_string());
} else {
panic!(
"Expected host to parse as Ipv6, but instead got {:?}",
parsed_host
);
}
}
}
}
|
{
let s = s.to_string();
Self::try_from(s)
}
|
identifier_body
|
host.rs
|
use std::convert::TryFrom;
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::str::FromStr;
use super::EndpointError;
use crate::ZmqError;
/// Represents a host address. Does not include the port, and may be either an
/// ip address or a domain name
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum Host {
/// An IPv4 address
Ipv4(Ipv4Addr),
/// An Ipv6 address
Ipv6(Ipv6Addr),
/// A domain name, such as `example.com` in `tcp://example.com:4567`.
Domain(String),
}
impl fmt::Display for Host {
fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
match self {
Host::Ipv4(addr) => write!(f, "{}", addr),
Host::Ipv6(addr) => write!(f, "{}", addr),
Host::Domain(name) => write!(f, "{}", name),
}
}
}
impl TryFrom<Host> for IpAddr {
type Error = ZmqError;
fn try_from(h: Host) -> Result<Self, Self::Error> {
match h {
Host::Ipv4(a) => Ok(IpAddr::V4(a)),
Host::Ipv6(a) => Ok(IpAddr::V6(a)),
Host::Domain(_) => Err(ZmqError::Other("Host was neither Ipv4 nor Ipv6")),
}
}
}
impl From<IpAddr> for Host {
fn from(a: IpAddr) -> Self {
match a {
IpAddr::V4(a) => Host::Ipv4(a),
IpAddr::V6(a) => Host::Ipv6(a),
}
}
}
impl TryFrom<String> for Host {
type Error = EndpointError;
/// An Ipv6 address must be enclosed by `[` and `]`.
fn try_from(s: String) -> Result<Self, Self::Error> {
if s.is_empty() {
return Err(EndpointError::Syntax("Host string should not be empty"));
}
if let Ok(addr) = s.parse::<Ipv4Addr>() {
return Ok(Host::Ipv4(addr));
}
// Attempt to parse ipv6 from either ::1 or [::1] using ascii
let ipv6_substr =
if s.starts_with('[') && s.len() >= 4 && *s.as_bytes().last().unwrap() == b']' {
let substr = &s[1..s.len() - 1];
debug_assert_eq!(substr.len(), s.len() - 2);
substr
} else {
&s
};
if let Ok(addr) = ipv6_substr.parse::<Ipv6Addr>() {
return Ok(Host::Ipv6(addr));
}
Ok(Host::Domain(s))
}
}
impl FromStr for Host {
type Err = EndpointError;
/// Equivalent to [`TryFrom<String>`]
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.to_string();
Self::try_from(s)
}
}
#[cfg(test)]
mod tests {
use super::*;
// These two tests on std are more for reference than any real test of
// functionality
#[test]
fn std_ipv6_parse() {
assert_eq!(Ipv6Addr::LOCALHOST, "::1".parse::<Ipv6Addr>().unwrap());
assert!("[::1]".parse::<Ipv6Addr>().is_err());
}
#[test]
fn std_ipv6_display() {
assert_eq!("::1", &Ipv6Addr::LOCALHOST.to_string());
}
#[test]
fn parse_and_display_nobracket_ipv6_same_as_std() {
let valid_addr_strs = vec![
"::1",
"::",
"2001:db8:a::123",
"2001:db8:0:0:0:0:2:1",
"2001:db8::2:1",
];
let invalid_addr_strs = vec!["", "[]", "[:]", ":"];
for valid in valid_addr_strs {
let parsed_std = valid.parse::<Ipv6Addr>().unwrap();
let parsed_host = valid.parse::<Host>().unwrap();
if let Host::Ipv6(parsed_host) = &parsed_host {
// Check that both are structurally the same
assert_eq!(&parsed_std, parsed_host);
} else {
panic!("Did not parse as IPV6!");
}
// Check that both display as the same
assert_eq!(parsed_std.to_string(), parsed_host.to_string());
}
for invalid in invalid_addr_strs {
invalid.parse::<Ipv6Addr>().unwrap_err();
let parsed_host = invalid.parse::<Host>();
if parsed_host.is_err() {
|
let parsed_host = parsed_host.unwrap();
if let Host::Domain(_) = parsed_host {
continue;
}
panic!(
"Expected that \"{}\" would not parse as Ipv6 or Ipv4, but instead it parsed as {:?}",
invalid, parsed_host
);
}
}
#[test]
fn parse_and_display_bracket_ipv6() {
let addr_strs = vec![
"[::1]",
"[::]",
"[2001:db8:a::123]",
"[2001:db8:0:0:0:0:2:1]",
"[2001:db8::2:1]",
];
fn remove_brackets(s: &str) -> &str {
assert!(s.starts_with('['));
assert!(s.ends_with(']'));
let result = &s[1..s.len() - 1];
assert_eq!(result.len(), s.len() - 2);
result
}
for addr_str in addr_strs {
let parsed_host: Host = addr_str.parse().unwrap();
assert!(addr_str.parse::<Ipv6Addr>().is_err());
if let Host::Ipv6(host_ipv6) = parsed_host {
assert_eq!(
host_ipv6,
remove_brackets(addr_str).parse::<Ipv6Addr>().unwrap()
);
assert_eq!(parsed_host.to_string(), host_ipv6.to_string());
} else {
panic!(
"Expected host to parse as Ipv6, but instead got {:?}",
parsed_host
);
}
}
}
}
|
continue;
}
|
random_line_split
|
cksum.rs
|
#![crate_name = "cksum"]
#![feature(collections, core, old_io, old_path, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
use std::old_io::{EndOfFile, File, IoError, IoResult, print};
use std::old_io::stdio::stdin_raw;
use std::mem;
use crc_table::CRC_TABLE;
#[path="../common/util.rs"]
#[macro_use]
mod util;
mod crc_table;
static NAME: &'static str = "cksum";
static VERSION: &'static str = "1.0.0";
#[inline]
fn crc_update(crc: u32, input: u8) -> u32 {
(crc << 8) ^ CRC_TABLE[((crc >> 24) as usize ^ input as usize) & 0xFF]
}
#[inline]
fn crc_final(mut crc: u32, mut length: usize) -> u32 {
    while length != 0 {
crc = crc_update(crc, length as u8);
length >>= 8;
}
!crc
}
#[inline]
fn cksum(fname: &str) -> IoResult<(u32, usize)> {
let mut crc = 0u32;
let mut size = 0usize;
let mut stdin_buf;
let mut file_buf;
let rd = match fname {
"-" => {
stdin_buf = stdin_raw();
&mut stdin_buf as &mut Reader
}
_ => {
file_buf = try!(File::open(&Path::new(fname)));
&mut file_buf as &mut Reader
}
};
let mut bytes: [u8; 1024 * 1024] = unsafe { mem::uninitialized() };
loop {
match rd.read(&mut bytes) {
Ok(num_bytes) => {
for &b in bytes[..num_bytes].iter() {
crc = crc_update(crc, b);
}
size += num_bytes;
}
            Err(IoError { kind: EndOfFile, .. }) => return Ok((crc_final(crc, size), size)),
Err(err) => return Err(err)
}
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let opts = [
getopts::optflag("h", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(err) => panic!("{}", err),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTIONS] [FILE]...", NAME);
println!("");
print(getopts::usage("Print CRC and size for each file.", opts.as_slice()).as_slice());
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let files = matches.free;
if files.is_empty() {
|
Ok((crc, size)) => println!("{} {}", crc, size),
Err(err) => {
show_error!("{}", err);
return 2;
}
}
return 0;
}
let mut exit_code = 0;
for fname in files.iter() {
match cksum(fname.as_slice()) {
Ok((crc, size)) => println!("{} {} {}", crc, size, fname),
Err(err) => {
show_error!("'{}' {}", fname, err);
exit_code = 2;
}
}
}
exit_code
}
|
match cksum("-") {
|
random_line_split
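A self-contained sketch (not part of the original crate) of the checksum scheme cksum implements: CRC-32 with polynomial 0x04C11DB7, processed MSB-first, with the byte length fed in low byte first and a final bitwise complement. Written against current std rather than the old_io APIs used above.

fn make_table() -> [u32; 256] {
    // Presumably the same table the crate ships precomputed in crc_table.rs.
    let mut table = [0u32; 256];
    for i in 0..256u32 {
        let mut crc = i << 24;
        for _ in 0..8 {
            crc = if crc & 0x8000_0000 != 0 {
                (crc << 1) ^ 0x04C1_1DB7
            } else {
                crc << 1
            };
        }
        table[i as usize] = crc;
    }
    table
}

fn cksum_bytes(data: &[u8]) -> u32 {
    let table = make_table();
    let update =
        |crc: u32, byte: u8| (crc << 8) ^ table[(((crc >> 24) ^ byte as u32) & 0xFF) as usize];
    // Feed the data, then the length (low byte first), then complement,
    // mirroring crc_update/crc_final above.
    let mut crc = data.iter().fold(0u32, |crc, &b| update(crc, b));
    let mut len = data.len();
    while len != 0 {
        crc = update(crc, len as u8);
        len >>= 8;
    }
    !crc
}

fn main() {
    // `cksum` reports 4294967295 for empty input.
    assert_eq!(cksum_bytes(b""), 0xFFFF_FFFF);
    println!("{}", cksum_bytes(b"hello\n"));
}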
|
cksum.rs
|
#![crate_name = "cksum"]
#![feature(collections, core, old_io, old_path, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
use std::old_io::{EndOfFile, File, IoError, IoResult, print};
use std::old_io::stdio::stdin_raw;
use std::mem;
use crc_table::CRC_TABLE;
#[path="../common/util.rs"]
#[macro_use]
mod util;
mod crc_table;
static NAME: &'static str = "cksum";
static VERSION: &'static str = "1.0.0";
#[inline]
fn
|
(crc: u32, input: u8) -> u32 {
(crc << 8) ^ CRC_TABLE[((crc >> 24) as usize ^ input as usize) & 0xFF]
}
#[inline]
fn crc_final(mut crc: u32, mut length: usize) -> u32 {
    while length != 0 {
crc = crc_update(crc, length as u8);
length >>= 8;
}
!crc
}
#[inline]
fn cksum(fname: &str) -> IoResult<(u32, usize)> {
let mut crc = 0u32;
let mut size = 0usize;
let mut stdin_buf;
let mut file_buf;
let rd = match fname {
"-" => {
stdin_buf = stdin_raw();
&mut stdin_buf as &mut Reader
}
_ => {
file_buf = try!(File::open(&Path::new(fname)));
&mut file_buf as &mut Reader
}
};
let mut bytes: [u8; 1024 * 1024] = unsafe { mem::uninitialized() };
loop {
match rd.read(&mut bytes) {
Ok(num_bytes) => {
for &b in bytes[..num_bytes].iter() {
crc = crc_update(crc, b);
}
size += num_bytes;
}
            Err(IoError { kind: EndOfFile, .. }) => return Ok((crc_final(crc, size), size)),
Err(err) => return Err(err)
}
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let opts = [
getopts::optflag("h", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(err) => panic!("{}", err),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTIONS] [FILE]...", NAME);
println!("");
print(getopts::usage("Print CRC and size for each file.", opts.as_slice()).as_slice());
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let files = matches.free;
if files.is_empty() {
match cksum("-") {
Ok((crc, size)) => println!("{} {}", crc, size),
Err(err) => {
show_error!("{}", err);
return 2;
}
}
return 0;
}
let mut exit_code = 0;
for fname in files.iter() {
match cksum(fname.as_slice()) {
Ok((crc, size)) => println!("{} {} {}", crc, size, fname),
Err(err) => {
show_error!("'{}' {}", fname, err);
exit_code = 2;
}
}
}
exit_code
}
|
crc_update
|
identifier_name
|
cksum.rs
|
#![crate_name = "cksum"]
#![feature(collections, core, old_io, old_path, rustc_private)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
extern crate getopts;
use std::old_io::{EndOfFile, File, IoError, IoResult, print};
use std::old_io::stdio::stdin_raw;
use std::mem;
use crc_table::CRC_TABLE;
#[path="../common/util.rs"]
#[macro_use]
mod util;
mod crc_table;
static NAME: &'static str = "cksum";
static VERSION: &'static str = "1.0.0";
#[inline]
fn crc_update(crc: u32, input: u8) -> u32 {
(crc << 8) ^ CRC_TABLE[((crc >> 24) as usize ^ input as usize) & 0xFF]
}
#[inline]
fn crc_final(mut crc: u32, mut length: usize) -> u32 {
    while length != 0 {
crc = crc_update(crc, length as u8);
length >>= 8;
}
!crc
}
#[inline]
fn cksum(fname: &str) -> IoResult<(u32, usize)>
|
Ok(num_bytes) => {
for &b in bytes[..num_bytes].iter() {
crc = crc_update(crc, b);
}
size += num_bytes;
}
            Err(IoError { kind: EndOfFile, .. }) => return Ok((crc_final(crc, size), size)),
Err(err) => return Err(err)
}
}
}
pub fn uumain(args: Vec<String>) -> i32 {
let opts = [
getopts::optflag("h", "help", "display this help and exit"),
getopts::optflag("V", "version", "output version information and exit"),
];
let matches = match getopts::getopts(args.tail(), &opts) {
Ok(m) => m,
Err(err) => panic!("{}", err),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTIONS] [FILE]...", NAME);
println!("");
print(getopts::usage("Print CRC and size for each file.", opts.as_slice()).as_slice());
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let files = matches.free;
if files.is_empty() {
match cksum("-") {
Ok((crc, size)) => println!("{} {}", crc, size),
Err(err) => {
show_error!("{}", err);
return 2;
}
}
return 0;
}
let mut exit_code = 0;
for fname in files.iter() {
match cksum(fname.as_slice()) {
Ok((crc, size)) => println!("{} {} {}", crc, size, fname),
Err(err) => {
show_error!("'{}' {}", fname, err);
exit_code = 2;
}
}
}
exit_code
}
|
{
let mut crc = 0u32;
let mut size = 0usize;
let mut stdin_buf;
let mut file_buf;
let rd = match fname {
"-" => {
stdin_buf = stdin_raw();
&mut stdin_buf as &mut Reader
}
_ => {
file_buf = try!(File::open(&Path::new(fname)));
&mut file_buf as &mut Reader
}
};
let mut bytes: [u8; 1024 * 1024] = unsafe { mem::uninitialized() };
loop {
match rd.read(&mut bytes) {
|
identifier_body
|
early-vtbl-resolution.rs
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct A { a: int }
pub fn main() {
let _x: Option<f64> = foo_func(0);
}
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
random_line_split
|
|
early-vtbl-resolution.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct A { a: int }
pub fn main()
|
{
let _x: Option<f64> = foo_func(0);
}
|
identifier_body
|
|
early-vtbl-resolution.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct
|
{ a: int }
pub fn main() {
let _x: Option<f64> = foo_func(0);
}
|
A
|
identifier_name
|
lib.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! # Cap'n Proto Runtime Library
//!
//! [Cap'n Proto](https://capnproto.org) is an extremely efficient protocol for
//! sharing data and capabilities.
//!
//! The Rust implementation is split into three separate crates.
//!
//! Code generation is handled by [capnpc-rust](https://github.com/dwrensha/capnpc-rust).
//!
//! The present crate is the runtime library required by that generated code. It is hosted on Github
//! [here](https://github.com/dwrensha/capnproto-rust).
//!
//! [capnp-rpc-rust](https://github.com/dwrensha/capnp-rpc-rust) is an implementation of a
//! distributed object-capability layer.
#![allow(raw_pointer_derive)]
#![crate_name="capnp"]
#![crate_type = "lib"]
extern crate byteorder;
#[cfg(test)]
extern crate quickcheck;
pub mod any_pointer;
pub mod capability;
pub mod data;
pub mod data_list;
pub mod enum_list;
pub mod list_list;
pub mod message;
pub mod primitive_list;
pub mod private;
pub mod serialize;
pub mod serialize_packed;
pub mod struct_list;
pub mod text;
pub mod text_list;
pub mod traits;
mod util;
/// Eight bytes of memory with opaque interior.
///
/// This type is used to ensure that the data of a message is properly aligned.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct Word(u64);
impl Word {
/// Does this, but faster:
/// `::std::iter::repeat(Word(0)).take(length).collect()`
pub fn allocate_zeroed_vec(length: usize) -> Vec<Word> {
let mut result : Vec<Word> = Vec::with_capacity(length);
unsafe {
result.set_len(length);
let p : *mut u8 = result.as_mut_ptr() as *mut u8;
::std::ptr::write_bytes(p, 0u8, length * ::std::mem::size_of::<Word>());
}
return result;
}
pub fn bytes_to_words<'a>(bytes: &'a [u8]) -> &'a [Word] {
unsafe {
::std::slice::from_raw_parts(bytes.as_ptr() as *const Word, bytes.len() / 8)
}
}
pub fn words_to_bytes<'a>(words: &'a [Word]) -> &'a [u8] {
unsafe {
::std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8)
}
}
pub fn words_to_bytes_mut<'a>(words: &'a mut [Word]) -> &'a mut [u8] {
unsafe {
::std::slice::from_raw_parts_mut(words.as_mut_ptr() as *mut u8, words.len() * 8)
}
}
#[cfg(test)]
pub fn from(n: u64) -> Word {
Word(n)
}
}
#[cfg(test)]
impl quickcheck::Arbitrary for Word {
fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Word {
Word(quickcheck::Arbitrary::arbitrary(g))
}
fn shrink(&self) -> Box<Iterator<Item=Word>+'static> {
Box::new(quickcheck::Arbitrary::shrink(&self.0).map(|value| Word(value)))
}
}
/// Size of a message. Every generated struct has a method `.total_size()` that returns this.
#[derive(Clone, Copy, PartialEq)]
pub struct MessageSize {
pub word_count : u64,
/// Size of the capability table.
pub cap_count : u32
}
impl MessageSize {
pub fn plus_eq(&mut self, other : MessageSize) {
self.word_count += other.word_count;
self.cap_count += other.cap_count;
}
}
/// An enum value or union discriminant that was not found among those defined in a schema.
#[derive(PartialEq, Clone, Copy, Debug)]
pub struct NotInSchema(pub u16);
|
impl ::std::fmt::Display for NotInSchema {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
write!(fmt, "Enum value or union discriminant {} was not present in the schema.", self.0)
}
}
impl ::std::error::Error for NotInSchema {
fn description(&self) -> &str {
"Enum value or union disriminant was not present in schema."
}
}
/// Because messages are lazily validated, the return type of any method that reads a pointer field
/// must be wrapped in a Result.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Things that can go wrong when you read a message.
#[derive(Debug)]
pub enum Error {
Decode { description : &'static str,
detail : Option<String> },
Io(std::io::Error),
}
impl Error {
pub fn new_decode_error(description : &'static str, detail : Option<String>) -> Error {
Error::Decode { description : description, detail : detail}
}
}
impl ::std::convert::From<::std::io::Error> for Error {
fn from(err : ::std::io::Error) -> Error {
Error::Io(err)
}
}
impl ::std::convert::From<NotInSchema> for Error {
fn from(e : NotInSchema) -> Error {
Error::new_decode_error("Enum value or union discriminant was not present in schema.",
Some(format!("value : {}", e.0)))
}
}
impl ::std::fmt::Display for Error {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
match *self {
Error::Decode { ref description, detail : Some(ref detail) } => {
write!(fmt, "{} {}", description, detail)
},
Error::Decode { ref description,.. } => write!(fmt, "{}", description),
Error::Io(ref io) => io.fmt(fmt),
}
}
}
impl ::std::error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Decode { ref description,.. } => description,
Error::Io(ref io) => ::std::error::Error::description(io),
}
}
fn cause(&self) -> Option<&::std::error::Error> {
match *self {
Error::Decode {.. } => None,
Error::Io(ref io) => io.cause(),
}
}
}
/// Helper struct that allows `MessageBuilder::get_segments_for_output()` to avoid heap allocations
/// in the single-segment case.
pub enum OutputSegments<'a> {
#[doc(hidden)]
SingleSegment([&'a [Word]; 1]),
#[doc(hidden)]
MultiSegment(Vec<&'a [Word]>),
}
impl <'a> ::std::ops::Deref for OutputSegments<'a> {
type Target = [&'a [Word]];
fn deref<'b>(&'b self) -> &'b [&'a [Word]] {
match self {
&OutputSegments::SingleSegment(ref s) => {
s
}
&OutputSegments::MultiSegment(ref v) => {
&*v
}
}
}
}
|
random_line_split
|
|
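The Word helpers in this record reinterpret one allocation either as 8-byte words or as raw bytes. A self-contained sketch of that reinterpretation, using plain u64 in place of the crate's Word type, could read as follows; alignment is safe here because the bytes originate from a u64 buffer:
// View a u64 buffer as bytes and back, the same reinterpretation that
// Word::words_to_bytes and Word::bytes_to_words perform.
fn main() {
    let words: Vec<u64> = vec![0x0102_0304_0506_0708, 0];
    // Each word contributes exactly 8 bytes.
    let bytes: &[u8] = unsafe {
        std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8)
    };
    assert_eq!(bytes.len(), 16);
    // Going back divides by 8, silently dropping any trailing partial word,
    // just like bytes_to_words does.
    let roundtrip: &[u64] = unsafe {
        std::slice::from_raw_parts(bytes.as_ptr() as *const u64, bytes.len() / 8)
    };
    assert_eq!(roundtrip, &words[..]);
}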
lib.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! # Cap'n Proto Runtime Library
//!
//! [Cap'n Proto](https://capnproto.org) is an extremely efficient protocol for
//! sharing data and capabilities.
//!
//! The Rust implementation is split into three separate crates.
//!
//! Code generation is handled by [capnpc-rust](https://github.com/dwrensha/capnpc-rust).
//!
//! The present crate is the runtime library required by that generated code. It is hosted on Github
//! [here](https://github.com/dwrensha/capnproto-rust).
//!
//! [capnp-rpc-rust](https://github.com/dwrensha/capnp-rpc-rust) is an implementation of a
//! distributed object-capability layer.
#![allow(raw_pointer_derive)]
#![crate_name="capnp"]
#![crate_type = "lib"]
extern crate byteorder;
#[cfg(test)]
extern crate quickcheck;
pub mod any_pointer;
pub mod capability;
pub mod data;
pub mod data_list;
pub mod enum_list;
pub mod list_list;
pub mod message;
pub mod primitive_list;
pub mod private;
pub mod serialize;
pub mod serialize_packed;
pub mod struct_list;
pub mod text;
pub mod text_list;
pub mod traits;
mod util;
/// Eight bytes of memory with opaque interior.
///
/// This type is used to ensure that the data of a message is properly aligned.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct Word(u64);
impl Word {
/// Does this, but faster:
/// `::std::iter::repeat(Word(0)).take(length).collect()`
pub fn allocate_zeroed_vec(length: usize) -> Vec<Word> {
let mut result : Vec<Word> = Vec::with_capacity(length);
unsafe {
result.set_len(length);
let p : *mut u8 = result.as_mut_ptr() as *mut u8;
::std::ptr::write_bytes(p, 0u8, length * ::std::mem::size_of::<Word>());
}
return result;
}
pub fn bytes_to_words<'a>(bytes: &'a [u8]) -> &'a [Word] {
unsafe {
::std::slice::from_raw_parts(bytes.as_ptr() as *const Word, bytes.len() / 8)
}
}
pub fn words_to_bytes<'a>(words: &'a [Word]) -> &'a [u8] {
unsafe {
::std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8)
}
}
pub fn words_to_bytes_mut<'a>(words: &'a mut [Word]) -> &'a mut [u8] {
unsafe {
::std::slice::from_raw_parts_mut(words.as_mut_ptr() as *mut u8, words.len() * 8)
}
}
#[cfg(test)]
pub fn from(n: u64) -> Word {
Word(n)
}
}
#[cfg(test)]
impl quickcheck::Arbitrary for Word {
fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Word {
Word(quickcheck::Arbitrary::arbitrary(g))
}
fn shrink(&self) -> Box<Iterator<Item=Word>+'static> {
Box::new(quickcheck::Arbitrary::shrink(&self.0).map(|value| Word(value)))
}
}
/// Size of a message. Every generated struct has a method `.total_size()` that returns this.
#[derive(Clone, Copy, PartialEq)]
pub struct MessageSize {
pub word_count : u64,
/// Size of the capability table.
pub cap_count : u32
}
impl MessageSize {
pub fn plus_eq(&mut self, other : MessageSize) {
self.word_count += other.word_count;
self.cap_count += other.cap_count;
}
}
/// An enum value or union discriminant that was not found among those defined in a schema.
#[derive(PartialEq, Clone, Copy, Debug)]
pub struct NotInSchema(pub u16);
impl ::std::fmt::Display for NotInSchema {
fn
|
(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
write!(fmt, "Enum value or union discriminant {} was not present in the schema.", self.0)
}
}
impl ::std::error::Error for NotInSchema {
fn description(&self) -> &str {
"Enum value or union disriminant was not present in schema."
}
}
/// Because messages are lazily validated, the return type of any method that reads a pointer field
/// must be wrapped in a Result.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Things that can go wrong when you read a message.
#[derive(Debug)]
pub enum Error {
Decode { description : &'static str,
detail : Option<String> },
Io(std::io::Error),
}
impl Error {
pub fn new_decode_error(description : &'static str, detail : Option<String>) -> Error {
Error::Decode { description : description, detail : detail}
}
}
impl ::std::convert::From<::std::io::Error> for Error {
fn from(err : ::std::io::Error) -> Error {
Error::Io(err)
}
}
impl ::std::convert::From<NotInSchema> for Error {
fn from(e : NotInSchema) -> Error {
Error::new_decode_error("Enum value or union discriminant was not present in schema.",
Some(format!("value : {}", e.0)))
}
}
impl ::std::fmt::Display for Error {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
match *self {
Error::Decode { ref description, detail : Some(ref detail) } => {
write!(fmt, "{} {}", description, detail)
},
Error::Decode { ref description,.. } => write!(fmt, "{}", description),
Error::Io(ref io) => io.fmt(fmt),
}
}
}
impl ::std::error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Decode { ref description,.. } => description,
Error::Io(ref io) => ::std::error::Error::description(io),
}
}
fn cause(&self) -> Option<&::std::error::Error> {
match *self {
Error::Decode {.. } => None,
Error::Io(ref io) => io.cause(),
}
}
}
/// Helper struct that allows `MessageBuilder::get_segments_for_output()` to avoid heap allocations
/// in the single-segment case.
pub enum OutputSegments<'a> {
#[doc(hidden)]
SingleSegment([&'a [Word]; 1]),
#[doc(hidden)]
MultiSegment(Vec<&'a [Word]>),
}
impl <'a> ::std::ops::Deref for OutputSegments<'a> {
type Target = [&'a [Word]];
fn deref<'b>(&'b self) -> &'b [&'a [Word]] {
match self {
&OutputSegments::SingleSegment(ref s) => {
s
}
&OutputSegments::MultiSegment(ref v) => {
&*v
}
}
}
}
|
fmt
|
identifier_name
|
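The NotInSchema-to-Error conversion in this record is what lets the ? operator translate an unknown discriminant into a decode error. A standalone sketch of that chain, with hypothetical stand-in types rather than the capnp API, might look like:
// Hypothetical stand-ins, not the capnp API, showing how From<NotInSchema>
// lets the ? operator turn an unknown discriminant into a decode error.
struct NotInSchema(u16);

enum Error {
    Decode { description: &'static str, detail: Option<String> },
}

impl From<NotInSchema> for Error {
    fn from(e: NotInSchema) -> Error {
        Error::Decode {
            description: "Enum value or union discriminant was not present in schema.",
            detail: Some(format!("value : {}", e.0)),
        }
    }
}

fn discriminant_to_name(d: u16) -> Result<&'static str, NotInSchema> {
    match d {
        0 => Ok("unset"),
        1 => Ok("set"),
        _ => Err(NotInSchema(d)),
    }
}

fn read_field(d: u16) -> Result<&'static str, Error> {
    // The ? operator applies the From impl on the error path.
    Ok(discriminant_to_name(d)?)
}

fn main() {
    assert!(read_field(1).is_ok());
    match read_field(7) {
        Err(Error::Decode { description, detail }) => println!("{} {:?}", description, detail),
        Ok(_) => unreachable!(),
    }
}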
lib.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! # Cap'n Proto Runtime Library
//!
//! [Cap'n Proto](https://capnproto.org) is an extremely efficient protocol for
//! sharing data and capabilities.
//!
//! The Rust implementation is split into three separate crates.
//!
//! Code generation is handled by [capnpc-rust](https://github.com/dwrensha/capnpc-rust).
//!
//! The present crate is the runtime library required by that generated code. It is hosted on Github
//! [here](https://github.com/dwrensha/capnproto-rust).
//!
//! [capnp-rpc-rust](https://github.com/dwrensha/capnp-rpc-rust) is an implementation of a
//! distributed object-capability layer.
#![allow(raw_pointer_derive)]
#![crate_name="capnp"]
#![crate_type = "lib"]
extern crate byteorder;
#[cfg(test)]
extern crate quickcheck;
pub mod any_pointer;
pub mod capability;
pub mod data;
pub mod data_list;
pub mod enum_list;
pub mod list_list;
pub mod message;
pub mod primitive_list;
pub mod private;
pub mod serialize;
pub mod serialize_packed;
pub mod struct_list;
pub mod text;
pub mod text_list;
pub mod traits;
mod util;
/// Eight bytes of memory with opaque interior.
///
/// This type is used to ensure that the data of a message is properly aligned.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct Word(u64);
impl Word {
/// Does this, but faster:
/// `::std::iter::repeat(Word(0)).take(length).collect()`
pub fn allocate_zeroed_vec(length: usize) -> Vec<Word> {
let mut result : Vec<Word> = Vec::with_capacity(length);
unsafe {
result.set_len(length);
let p : *mut u8 = result.as_mut_ptr() as *mut u8;
::std::ptr::write_bytes(p, 0u8, length * ::std::mem::size_of::<Word>());
}
return result;
}
pub fn bytes_to_words<'a>(bytes: &'a [u8]) -> &'a [Word] {
unsafe {
::std::slice::from_raw_parts(bytes.as_ptr() as *const Word, bytes.len() / 8)
}
}
pub fn words_to_bytes<'a>(words: &'a [Word]) -> &'a [u8] {
unsafe {
::std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8)
}
}
pub fn words_to_bytes_mut<'a>(words: &'a mut [Word]) -> &'a mut [u8] {
unsafe {
::std::slice::from_raw_parts_mut(words.as_mut_ptr() as *mut u8, words.len() * 8)
}
}
#[cfg(test)]
pub fn from(n: u64) -> Word {
Word(n)
}
}
#[cfg(test)]
impl quickcheck::Arbitrary for Word {
fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Word {
Word(quickcheck::Arbitrary::arbitrary(g))
}
fn shrink(&self) -> Box<Iterator<Item=Word>+'static> {
Box::new(quickcheck::Arbitrary::shrink(&self.0).map(|value| Word(value)))
}
}
/// Size of a message. Every generated struct has a method `.total_size()` that returns this.
#[derive(Clone, Copy, PartialEq)]
pub struct MessageSize {
pub word_count : u64,
/// Size of the capability table.
pub cap_count : u32
}
impl MessageSize {
pub fn plus_eq(&mut self, other : MessageSize) {
self.word_count += other.word_count;
self.cap_count += other.cap_count;
}
}
/// An enum value or union discriminant that was not found among those defined in a schema.
#[derive(PartialEq, Clone, Copy, Debug)]
pub struct NotInSchema(pub u16);
impl ::std::fmt::Display for NotInSchema {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
write!(fmt, "Enum value or union discriminant {} was not present in the schema.", self.0)
}
}
impl ::std::error::Error for NotInSchema {
fn description(&self) -> &str {
"Enum value or union disriminant was not present in schema."
}
}
/// Because messages are lazily validated, the return type of any method that reads a pointer field
/// must be wrapped in a Result.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Things that can go wrong when you read a message.
#[derive(Debug)]
pub enum Error {
Decode { description : &'static str,
detail : Option<String> },
Io(std::io::Error),
}
impl Error {
pub fn new_decode_error(description : &'static str, detail : Option<String>) -> Error {
Error::Decode { description : description, detail : detail}
}
}
impl ::std::convert::From<::std::io::Error> for Error {
fn from(err : ::std::io::Error) -> Error {
Error::Io(err)
}
}
impl ::std::convert::From<NotInSchema> for Error {
fn from(e : NotInSchema) -> Error {
Error::new_decode_error("Enum value or union discriminant was not present in schema.",
Some(format!("value : {}", e.0)))
}
}
impl ::std::fmt::Display for Error {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
match *self {
Error::Decode { ref description, detail : Some(ref detail) } =>
|
,
Error::Decode { ref description,.. } => write!(fmt, "{}", description),
Error::Io(ref io) => io.fmt(fmt),
}
}
}
impl ::std::error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Decode { ref description,.. } => description,
Error::Io(ref io) => ::std::error::Error::description(io),
}
}
fn cause(&self) -> Option<&::std::error::Error> {
match *self {
Error::Decode {.. } => None,
Error::Io(ref io) => io.cause(),
}
}
}
/// Helper struct that allows `MessageBuilder::get_segments_for_output()` to avoid heap allocations
/// in the single-segment case.
pub enum OutputSegments<'a> {
#[doc(hidden)]
SingleSegment([&'a [Word]; 1]),
#[doc(hidden)]
MultiSegment(Vec<&'a [Word]>),
}
impl <'a> ::std::ops::Deref for OutputSegments<'a> {
type Target = [&'a [Word]];
fn deref<'b>(&'b self) -> &'b [&'a [Word]] {
match self {
&OutputSegments::SingleSegment(ref s) => {
s
}
&OutputSegments::MultiSegment(ref v) => {
&*v
}
}
}
}
|
{
write!(fmt, "{} {}", description, detail)
}
|
conditional_block
|
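allocate_zeroed_vec in this record trades elementwise initialization for one bulk write. A sound standalone sketch of the same idea over plain u64 follows; on current Rust, vec![0u64; n] is usually the idiomatic equivalent:
// Bulk-zeroing sketch: reserve capacity, zero it with one write_bytes call,
// then expose the now-initialized length.
fn allocate_zeroed(length: usize) -> Vec<u64> {
    let mut result: Vec<u64> = Vec::with_capacity(length);
    unsafe {
        // Zero the reserved elements first, then adjust the length, so every
        // element is initialized before it becomes observable.
        std::ptr::write_bytes(result.as_mut_ptr(), 0u8, length);
        result.set_len(length);
    }
    result
}

fn main() {
    assert_eq!(allocate_zeroed(4), vec![0u64; 4]);
}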
lib.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! # Cap'n Proto Runtime Library
//!
//! [Cap'n Proto](https://capnproto.org) is an extremely efficient protocol for
//! sharing data and capabilities.
//!
//! The Rust implementation is split into three separate crates.
//!
//! Code generation is handled by [capnpc-rust](https://github.com/dwrensha/capnpc-rust).
//!
//! The present crate is the runtime library required by that generated code. It is hosted on Github
//! [here](https://github.com/dwrensha/capnproto-rust).
//!
//! [capnp-rpc-rust](https://github.com/dwrensha/capnp-rpc-rust) is an implementation of a
//! distributed object-capability layer.
#![allow(raw_pointer_derive)]
#![crate_name="capnp"]
#![crate_type = "lib"]
extern crate byteorder;
#[cfg(test)]
extern crate quickcheck;
pub mod any_pointer;
pub mod capability;
pub mod data;
pub mod data_list;
pub mod enum_list;
pub mod list_list;
pub mod message;
pub mod primitive_list;
pub mod private;
pub mod serialize;
pub mod serialize_packed;
pub mod struct_list;
pub mod text;
pub mod text_list;
pub mod traits;
mod util;
/// Eight bytes of memory with opaque interior.
///
/// This type is used to ensure that the data of a message is properly aligned.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct Word(u64);
impl Word {
/// Does this, but faster:
/// `::std::iter::repeat(Word(0)).take(length).collect()`
pub fn allocate_zeroed_vec(length: usize) -> Vec<Word>
|
pub fn bytes_to_words<'a>(bytes: &'a [u8]) -> &'a [Word] {
unsafe {
::std::slice::from_raw_parts(bytes.as_ptr() as *const Word, bytes.len() / 8)
}
}
pub fn words_to_bytes<'a>(words: &'a [Word]) -> &'a [u8] {
unsafe {
::std::slice::from_raw_parts(words.as_ptr() as *const u8, words.len() * 8)
}
}
pub fn words_to_bytes_mut<'a>(words: &'a mut [Word]) -> &'a mut [u8] {
unsafe {
::std::slice::from_raw_parts_mut(words.as_mut_ptr() as *mut u8, words.len() * 8)
}
}
#[cfg(test)]
pub fn from(n: u64) -> Word {
Word(n)
}
}
#[cfg(test)]
impl quickcheck::Arbitrary for Word {
fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Word {
Word(quickcheck::Arbitrary::arbitrary(g))
}
fn shrink(&self) -> Box<Iterator<Item=Word>+'static> {
Box::new(quickcheck::Arbitrary::shrink(&self.0).map(|value| Word(value)))
}
}
/// Size of a message. Every generated struct has a method `.total_size()` that returns this.
#[derive(Clone, Copy, PartialEq)]
pub struct MessageSize {
pub word_count : u64,
/// Size of the capability table.
pub cap_count : u32
}
impl MessageSize {
pub fn plus_eq(&mut self, other : MessageSize) {
self.word_count += other.word_count;
self.cap_count += other.cap_count;
}
}
/// An enum value or union discriminant that was not found among those defined in a schema.
#[derive(PartialEq, Clone, Copy, Debug)]
pub struct NotInSchema(pub u16);
impl ::std::fmt::Display for NotInSchema {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
write!(fmt, "Enum value or union discriminant {} was not present in the schema.", self.0)
}
}
impl ::std::error::Error for NotInSchema {
fn description(&self) -> &str {
"Enum value or union disriminant was not present in schema."
}
}
/// Because messages are lazily validated, the return type of any method that reads a pointer field
/// must be wrapped in a Result.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Things that can go wrong when you read a message.
#[derive(Debug)]
pub enum Error {
Decode { description : &'static str,
detail : Option<String> },
Io(std::io::Error),
}
impl Error {
pub fn new_decode_error(description : &'static str, detail : Option<String>) -> Error {
Error::Decode { description : description, detail : detail}
}
}
impl ::std::convert::From<::std::io::Error> for Error {
fn from(err : ::std::io::Error) -> Error {
Error::Io(err)
}
}
impl ::std::convert::From<NotInSchema> for Error {
fn from(e : NotInSchema) -> Error {
Error::new_decode_error("Enum value or union discriminant was not present in schema.",
Some(format!("value : {}", e.0)))
}
}
impl ::std::fmt::Display for Error {
fn fmt(&self, fmt : &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
match *self {
Error::Decode { ref description, detail : Some(ref detail) } => {
write!(fmt, "{} {}", description, detail)
},
Error::Decode { ref description,.. } => write!(fmt, "{}", description),
Error::Io(ref io) => io.fmt(fmt),
}
}
}
impl ::std::error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Decode { ref description,.. } => description,
Error::Io(ref io) => ::std::error::Error::description(io),
}
}
fn cause(&self) -> Option<&::std::error::Error> {
match *self {
Error::Decode {.. } => None,
Error::Io(ref io) => io.cause(),
}
}
}
/// Helper struct that allows `MessageBuilder::get_segments_for_output()` to avoid heap allocations
/// in the single-segment case.
pub enum OutputSegments<'a> {
#[doc(hidden)]
SingleSegment([&'a [Word]; 1]),
#[doc(hidden)]
MultiSegment(Vec<&'a [Word]>),
}
impl <'a> ::std::ops::Deref for OutputSegments<'a> {
type Target = [&'a [Word]];
fn deref<'b>(&'b self) -> &'b [&'a [Word]] {
match self {
&OutputSegments::SingleSegment(ref s) => {
s
}
&OutputSegments::MultiSegment(ref v) => {
&*v
}
}
}
}
|
{
let mut result : Vec<Word> = Vec::with_capacity(length);
unsafe {
result.set_len(length);
let p : *mut u8 = result.as_mut_ptr() as *mut u8;
::std::ptr::write_bytes(p, 0u8, length * ::std::mem::size_of::<Word>());
}
return result;
}
|
identifier_body
|
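The Deref impl on OutputSegments is what lets callers treat the single-segment and multi-segment cases as one slice of segments. A minimal standalone sketch of that design, with hypothetical names instead of the capnp types, might be:
use std::ops::Deref;

// Hypothetical stand-in for OutputSegments: one variant avoids a heap Vec for
// the single-segment case, and Deref gives both variants a uniform slice view.
enum Segments<'a> {
    Single([&'a [u64]; 1]),
    Multi(Vec<&'a [u64]>),
}

impl<'a> Deref for Segments<'a> {
    type Target = [&'a [u64]];
    fn deref(&self) -> &[&'a [u64]] {
        match self {
            Segments::Single(s) => s,
            Segments::Multi(v) => v,
        }
    }
}

fn main() {
    let data = vec![1u64, 2, 3];
    let single = Segments::Single([&data[..]]);
    let multi = Segments::Multi(vec![&data[..], &data[..]]);
    // Callers index or iterate both cases identically.
    assert_eq!(single.len(), 1);
    assert_eq!(multi.len(), 2);
}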
issue-30079.rs
|
struct SemiPriv;
mod m1 {
struct Priv;
impl ::SemiPriv {
pub fn f(_: Priv) {} //~ WARN private type `m1::Priv` in public interface
//~^ WARNING hard error
}
impl Priv {
pub fn f(_: Priv) {} // ok
}
}
mod m2 {
struct Priv;
impl ::std::ops::Deref for ::SemiPriv {
type Target = Priv; //~ ERROR private type `m2::Priv` in public interface
fn deref(&self) -> &Self::Target { unimplemented!() }
}
impl ::std::ops::Deref for Priv {
type Target = Priv; // ok
fn deref(&self) -> &Self::Target { unimplemented!() }
}
}
trait SemiPrivTrait {
type Assoc;
}
mod m3 {
struct Priv;
impl ::SemiPrivTrait for () {
type Assoc = Priv; //~ ERROR private type `m3::Priv` in public interface
}
}
fn main()
|
{}
|
identifier_body
|
|
issue-30079.rs
|
struct
|
;
mod m1 {
struct Priv;
impl ::SemiPriv {
pub fn f(_: Priv) {} //~ WARN private type `m1::Priv` in public interface
//~^ WARNING hard error
}
impl Priv {
pub fn f(_: Priv) {} // ok
}
}
mod m2 {
struct Priv;
impl ::std::ops::Deref for ::SemiPriv {
type Target = Priv; //~ ERROR private type `m2::Priv` in public interface
fn deref(&self) -> &Self::Target { unimplemented!() }
}
impl ::std::ops::Deref for Priv {
type Target = Priv; // ok
fn deref(&self) -> &Self::Target { unimplemented!() }
}
}
trait SemiPrivTrait {
type Assoc;
}
mod m3 {
struct Priv;
impl ::SemiPrivTrait for () {
type Assoc = Priv; //~ ERROR private type `m3::Priv` in public interface
}
}
fn main() {}
|
SemiPriv
|
identifier_name
|
issue-30079.rs
|
struct SemiPriv;
mod m1 {
struct Priv;
impl ::SemiPriv {
pub fn f(_: Priv) {} //~ WARN private type `m1::Priv` in public interface
//~^ WARNING hard error
}
impl Priv {
pub fn f(_: Priv) {} // ok
}
}
mod m2 {
struct Priv;
impl ::std::ops::Deref for ::SemiPriv {
type Target = Priv; //~ ERROR private type `m2::Priv` in public interface
fn deref(&self) -> &Self::Target { unimplemented!() }
}
|
impl ::std::ops::Deref for Priv {
type Target = Priv; // ok
fn deref(&self) -> &Self::Target { unimplemented!() }
}
}
trait SemiPrivTrait {
type Assoc;
}
mod m3 {
struct Priv;
impl ::SemiPrivTrait for () {
type Assoc = Priv; //~ ERROR private type `m3::Priv` in public interface
}
}
fn main() {}
|
random_line_split
|
|
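The issue-30079.rs splits above are compile-fail tests for private types leaking into public interfaces. One common repair is to raise the visibility of the leaked type until it matches the signature that exposes it; a sketch with hypothetical names (SemiPub, NowVisible), for Rust 2018 or later, could look like:
pub struct SemiPub;

mod m1 {
    pub(crate) struct NowVisible;

    impl crate::SemiPub {
        // NowVisible is now at least as visible as this crate-local method, so
        // the "private type in public interface" diagnostic no longer applies.
        pub(crate) fn f(_: NowVisible) {}
    }
}

fn main() {
    crate::SemiPub::f(m1::NowVisible);
}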
atomic_load.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_load;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_load<T>(src: *const T) -> T;
struct A<T> {
v: UnsafeCell<T>
}
unsafe impl Sync for A<T> {}
impl<T> A<T> {
fn new(v: T) -> A<T> {
A { v: UnsafeCell::<T>::new(v) }
}
}
type T = usize;
macro_rules! atomic_load_test {
($value:expr) => ({
let value: T = $value;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let src: *mut T = clone.v.get();
let result: T = unsafe { atomic_load::<T>(src) };
assert_eq!(result, $value);
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { atomic_load::<T>(ptr) }, $value);
})
}
#[test]
fn
|
() {
atomic_load_test!( 68 );
}
}
|
atomic_load_test1
|
identifier_name
|
atomic_load.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_load;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_load<T>(src: *const T) -> T;
struct A<T> {
v: UnsafeCell<T>
}
unsafe impl Sync for A<T> {}
impl<T> A<T> {
fn new(v: T) -> A<T>
|
}
type T = usize;
macro_rules! atomic_load_test {
($value:expr) => ({
let value: T = $value;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let src: *mut T = clone.v.get();
let result: T = unsafe { atomic_load::<T>(src) };
assert_eq!(result, $value);
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { atomic_load::<T>(ptr) }, $value);
})
}
#[test]
fn atomic_load_test1() {
atomic_load_test!( 68 );
}
}
|
{
A { v: UnsafeCell::<T>::new(v) }
}
|
identifier_body
|
atomic_load.rs
|
#![feature(core, core_intrinsics)]
extern crate core;
#[cfg(test)]
mod tests {
use core::intrinsics::atomic_load;
use core::cell::UnsafeCell;
use std::sync::Arc;
use std::thread;
// pub fn atomic_load<T>(src: *const T) -> T;
struct A<T> {
v: UnsafeCell<T>
}
|
fn new(v: T) -> A<T> {
A { v: UnsafeCell::<T>::new(v) }
}
}
type T = usize;
macro_rules! atomic_load_test {
($value:expr) => ({
let value: T = $value;
let a: A<T> = A::<T>::new(value);
let data: Arc<A<T>> = Arc::<A<T>>::new(a);
let clone: Arc<A<T>> = data.clone();
thread::spawn(move || {
let src: *mut T = clone.v.get();
let result: T = unsafe { atomic_load::<T>(src) };
assert_eq!(result, $value);
});
thread::sleep_ms(10);
let ptr: *mut T = data.v.get();
assert_eq!(unsafe { atomic_load::<T>(ptr) }, $value);
})
}
#[test]
fn atomic_load_test1() {
atomic_load_test!( 68 );
}
}
|
unsafe impl Sync for A<T> {}
impl<T> A<T> {
|
random_line_split
|
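core::intrinsics::atomic_load, used by the atomic_load.rs splits above, is a perma-unstable intrinsic. A stable-Rust sketch of the same test idea, using std::sync::atomic instead of the intrinsic and the UnsafeCell wrapper, might be:
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;

// One thread publishes a value, another observes it through an atomic load;
// this is the data-race-free read the intrinsic-based test checks for.
fn main() {
    let shared = Arc::new(AtomicUsize::new(68));
    let reader = Arc::clone(&shared);
    let handle = thread::spawn(move || reader.load(Ordering::SeqCst));
    assert_eq!(handle.join().unwrap(), 68);
    assert_eq!(shared.load(Ordering::SeqCst), 68);
}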
utils.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use std::io;
use super::{RemoveRegionError, MAX_PAGE_COMPACT_BUFFER};
use logger::error;
use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap, GuestMemoryRegion};
/// This takes a vector of page frame numbers, and compacts them
/// into ranges of consecutive pages. The result is a vector
/// of (start_page_frame_number, range_length) pairs.
pub(crate) fn compact_page_frame_numbers(v: &mut [u32]) -> Vec<(u32, u32)> {
if v.is_empty() {
return vec![];
}
// Since the total number of pages that can be
// received at once is `MAX_PAGE_COMPACT_BUFFER`,
// this sort does not change the complexity of handling
// an inflation.
v.sort_unstable();
// Since there are at most `MAX_PAGE_COMPACT_BUFFER` pages, setting the
// capacity of `result` to this makes sense.
let mut result = Vec::with_capacity(MAX_PAGE_COMPACT_BUFFER);
// The most recent range of pages is [previous..previous + length).
let mut previous = 0;
let mut length = 1;
for pfn_index in 1..v.len() {
let page_frame_number = v[pfn_index];
// Skip duplicate pages. This will ensure we only consider
// distinct PFNs.
if page_frame_number == v[pfn_index - 1] {
error!("Skipping duplicate PFN {}.", page_frame_number);
continue;
}
// Check if the current page frame number is adjacent to the most recent page range.
// This operation will never overflow because for whatever value `v[previous]`
// has in the u32 range, we know there are at least `length` consecutive numbers
// greater than it in the array (the greatest so far being `page_frame_number`),
// since `v[previous]` is before all of them in the sorted array and `length`
// was incremented for each consecutive one. This is true only because we skip
// duplicates.
if page_frame_number == v[previous] + length {
// If so, extend that range.
length += 1;
} else {
// Otherwise, push (previous, length) to the result vector.
result.push((v[previous], length));
// And update the most recent range of pages.
previous = pfn_index;
length = 1;
}
}
|
result.push((v[previous], length));
result
}
pub(crate) fn remove_range(
guest_memory: &GuestMemoryMmap,
range: (GuestAddress, u64),
restored: bool,
) -> std::result::Result<(), RemoveRegionError> {
let (guest_address, range_len) = range;
if let Some(region) = guest_memory.find_region(guest_address) {
if guest_address.0 + range_len > region.start_addr().0 + region.len() {
return Err(RemoveRegionError::MalformedRange);
}
let phys_address = guest_memory
.get_host_address(guest_address)
.map_err(|_| RemoveRegionError::AddressTranslation)?;
// Mmap a new anonymous region over the present one in order to create a hole.
// This workaround is (only) needed after resuming from a snapshot because the guest memory
// is mmaped from file as private and there is no `madvise` flag that works for this case.
if restored {
let ret = unsafe {
libc::mmap(
phys_address as *mut _,
range_len as usize,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_FIXED | libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
-1,
0,
)
};
if ret == libc::MAP_FAILED {
return Err(RemoveRegionError::MmapFail(io::Error::last_os_error()));
}
};
// Madvise the region in order to mark it as not used.
let ret = unsafe {
libc::madvise(
phys_address as *mut _,
range_len as usize,
libc::MADV_DONTNEED,
)
};
if ret < 0 {
return Err(RemoveRegionError::MadviseFail(io::Error::last_os_error()));
}
Ok(())
} else {
Err(RemoveRegionError::RegionNotFound)
}
}
#[cfg(test)]
mod tests {
use super::*;
use vm_memory::Bytes;
/// This asserts that $lhs matches $rhs.
macro_rules! assert_match {
($lhs:expr, $rhs:pat) => {{
assert!(matches!($lhs, $rhs))
}};
}
#[test]
fn test_compact_page_indices() {
// Test empty input.
assert!(compact_page_frame_numbers(&mut []).is_empty());
// Test single compact range.
assert_eq!(
compact_page_frame_numbers(&mut (0_u32..100_u32).collect::<Vec<u32>>().as_mut_slice()),
vec![(0, 100)]
);
// `compact_page_frame_numbers` works even when given out of order input.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..100_u32).rev().collect::<Vec<u32>>().as_mut_slice()
),
vec![(0, 100)]
);
// Test with 100 distinct ranges.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32)
.step_by(100)
.flat_map(|x| (x..x + 10).rev())
.collect::<Vec<u32>>()
),
(0_u32..10000_u32)
.step_by(100)
.map(|x| (x, 10_u32))
.collect::<Vec<(u32, u32)>>()
);
// Test range with duplicates.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32).map(|x| x / 2).collect::<Vec<u32>>()
),
vec![(0, 5000)]
);
// Test there is no overflow when there are duplicate max values.
assert_eq!(
compact_page_frame_numbers(&mut [u32::MAX, u32::MAX]),
vec![(u32::MAX, 1)]
);
}
#[test]
fn test_remove_range() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), false).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), false).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), false).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Madvise fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), false).unwrap_err(),
RemoveRegionError::MadviseFail(_)
);
}
#[test]
fn test_remove_range_on_restored() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), true).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), true).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), true).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Mmap fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), true).unwrap_err(),
RemoveRegionError::MmapFail(_)
);
}
/// -------------------------------------
/// BEGIN PROPERTY BASED TESTING
use proptest::prelude::*;
fn random_pfn_u32_max() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER elements) filled with random u32 elements.
prop::collection::vec(0..std::u32::MAX, 0..MAX_PAGE_COMPACT_BUFFER)
}
fn random_pfn_100() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER/8) filled with random u32 elements (0 - 100).
prop::collection::vec(0..100u32, 0..MAX_PAGE_COMPACT_BUFFER / 8)
}
    // The uncompactor will output deduplicated and sorted elements, as the
    // compaction algorithm guarantees.
fn uncompact(compacted: Vec<(u32, u32)>) -> Vec<u32> {
let mut result = Vec::new();
for (start, len) in compacted {
result.extend(start..start + len);
}
result
}
fn sort_and_dedup<T: Ord + Clone>(v: &[T]) -> Vec<T> {
let mut sorted_v = v.to_vec();
sorted_v.sort_unstable();
sorted_v.dedup();
sorted_v
}
// The below prop tests will validate the following output properties:
// - vec elements are sorted by first tuple value
// - no pfn duplicates are present
// - no pfn is lost
#[test]
fn test_pfn_compact() {
let cfg = ProptestConfig::with_cases(1500);
proptest!(cfg, |(mut input1 in random_pfn_u32_max(), mut input2 in random_pfn_100())| {
// The uncompactor will output sorted elements.
prop_assert!(
uncompact(compact_page_frame_numbers(input1.as_mut_slice()))
== sort_and_dedup(input1.as_slice())
);
// Input2 will ensure duplicate PFN cases are also covered.
prop_assert!(
uncompact(compact_page_frame_numbers(input2.as_mut_slice()))
== sort_and_dedup(input2.as_slice())
);
});
}
}
|
// Don't forget to push the last range to the result.
|
random_line_split
|
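compact_page_frame_numbers in this record documents its contract as sorted, deduplicated PFNs collapsed into (start, length) runs. A self-contained sketch of that contract, with a hypothetical compact helper rather than the Firecracker function itself, might be:
// Hypothetical compact helper illustrating the documented contract: sorted,
// deduplicated page frame numbers collapse into (start, length) runs.
fn compact(mut v: Vec<u32>) -> Vec<(u32, u32)> {
    v.sort_unstable();
    v.dedup();
    let mut out: Vec<(u32, u32)> = Vec::new();
    for pfn in v {
        match out.last_mut() {
            // Adjacent to the current run: extend it.
            Some((start, len)) if pfn == *start + *len => *len += 1,
            // Otherwise start a new run of length 1.
            _ => out.push((pfn, 1)),
        }
    }
    out
}

fn main() {
    assert_eq!(compact(vec![9, 3, 4, 4, 5, 1]), vec![(1, 1), (3, 3), (9, 1)]);
    assert!(compact(Vec::new()).is_empty());
}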
utils.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use std::io;
use super::{RemoveRegionError, MAX_PAGE_COMPACT_BUFFER};
use logger::error;
use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap, GuestMemoryRegion};
/// This takes a vector of page frame numbers, and compacts them
/// into ranges of consecutive pages. The result is a vector
/// of (start_page_frame_number, range_length) pairs.
pub(crate) fn compact_page_frame_numbers(v: &mut [u32]) -> Vec<(u32, u32)> {
if v.is_empty() {
return vec![];
}
// Since the total number of pages that can be
// received at once is `MAX_PAGE_COMPACT_BUFFER`,
// this sort does not change the complexity of handling
// an inflation.
v.sort_unstable();
// Since there are at most `MAX_PAGE_COMPACT_BUFFER` pages, setting the
// capacity of `result` to this makes sense.
let mut result = Vec::with_capacity(MAX_PAGE_COMPACT_BUFFER);
// The most recent range of pages is [previous..previous + length).
let mut previous = 0;
let mut length = 1;
for pfn_index in 1..v.len() {
let page_frame_number = v[pfn_index];
// Skip duplicate pages. This will ensure we only consider
// distinct PFNs.
if page_frame_number == v[pfn_index - 1] {
error!("Skipping duplicate PFN {}.", page_frame_number);
continue;
}
// Check if the current page frame number is adjacent to the most recent page range.
// This operation will never overflow because for whatever value `v[previous]`
// has in the u32 range, we know there are at least `length` consecutive numbers
// greater than it in the array (the greatest so far being `page_frame_number`),
// since `v[previous]` is before all of them in the sorted array and `length`
// was incremented for each consecutive one. This is true only because we skip
// duplicates.
if page_frame_number == v[previous] + length {
// If so, extend that range.
length += 1;
} else {
// Otherwise, push (previous, length) to the result vector.
result.push((v[previous], length));
// And update the most recent range of pages.
previous = pfn_index;
length = 1;
}
}
// Don't forget to push the last range to the result.
result.push((v[previous], length));
result
}
pub(crate) fn remove_range(
guest_memory: &GuestMemoryMmap,
range: (GuestAddress, u64),
restored: bool,
) -> std::result::Result<(), RemoveRegionError> {
let (guest_address, range_len) = range;
if let Some(region) = guest_memory.find_region(guest_address) {
if guest_address.0 + range_len > region.start_addr().0 + region.len() {
return Err(RemoveRegionError::MalformedRange);
}
let phys_address = guest_memory
.get_host_address(guest_address)
.map_err(|_| RemoveRegionError::AddressTranslation)?;
// Mmap a new anonymous region over the present one in order to create a hole.
// This workaround is (only) needed after resuming from a snapshot because the guest memory
// is mmaped from file as private and there is no `madvise` flag that works for this case.
if restored
|
;
// Madvise the region in order to mark it as not used.
let ret = unsafe {
libc::madvise(
phys_address as *mut _,
range_len as usize,
libc::MADV_DONTNEED,
)
};
if ret < 0 {
return Err(RemoveRegionError::MadviseFail(io::Error::last_os_error()));
}
Ok(())
} else {
Err(RemoveRegionError::RegionNotFound)
}
}
#[cfg(test)]
mod tests {
use super::*;
use vm_memory::Bytes;
/// This asserts that $lhs matches $rhs.
macro_rules! assert_match {
($lhs:expr, $rhs:pat) => {{
assert!(matches!($lhs, $rhs))
}};
}
#[test]
fn test_compact_page_indices() {
// Test empty input.
assert!(compact_page_frame_numbers(&mut []).is_empty());
// Test single compact range.
assert_eq!(
compact_page_frame_numbers(&mut (0_u32..100_u32).collect::<Vec<u32>>().as_mut_slice()),
vec![(0, 100)]
);
// `compact_page_frame_numbers` works even when given out of order input.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..100_u32).rev().collect::<Vec<u32>>().as_mut_slice()
),
vec![(0, 100)]
);
// Test with 100 distinct ranges.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32)
.step_by(100)
.flat_map(|x| (x..x + 10).rev())
.collect::<Vec<u32>>()
),
(0_u32..10000_u32)
.step_by(100)
.map(|x| (x, 10_u32))
.collect::<Vec<(u32, u32)>>()
);
// Test range with duplicates.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32).map(|x| x / 2).collect::<Vec<u32>>()
),
vec![(0, 5000)]
);
// Test there is no overflow when there are duplicate max values.
assert_eq!(
compact_page_frame_numbers(&mut [u32::MAX, u32::MAX]),
vec![(u32::MAX, 1)]
);
}
#[test]
fn test_remove_range() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), false).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), false).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), false).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Madvise fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), false).unwrap_err(),
RemoveRegionError::MadviseFail(_)
);
}
#[test]
fn test_remove_range_on_restored() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), true).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), true).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), true).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Mmap fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), true).unwrap_err(),
RemoveRegionError::MmapFail(_)
);
}
/// -------------------------------------
/// BEGIN PROPERTY BASED TESTING
use proptest::prelude::*;
fn random_pfn_u32_max() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER elements) filled with random u32 elements.
prop::collection::vec(0..std::u32::MAX, 0..MAX_PAGE_COMPACT_BUFFER)
}
fn random_pfn_100() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER/8) filled with random u32 elements (0 - 100).
prop::collection::vec(0..100u32, 0..MAX_PAGE_COMPACT_BUFFER / 8)
}
    // The uncompactor will output deduplicated and sorted elements, as the
    // compaction algorithm guarantees.
fn uncompact(compacted: Vec<(u32, u32)>) -> Vec<u32> {
let mut result = Vec::new();
for (start, len) in compacted {
result.extend(start..start + len);
}
result
}
fn sort_and_dedup<T: Ord + Clone>(v: &[T]) -> Vec<T> {
let mut sorted_v = v.to_vec();
sorted_v.sort_unstable();
sorted_v.dedup();
sorted_v
}
// The below prop tests will validate the following output properties:
// - vec elements are sorted by first tuple value
// - no pfn duplicates are present
// - no pfn is lost
#[test]
fn test_pfn_compact() {
let cfg = ProptestConfig::with_cases(1500);
proptest!(cfg, |(mut input1 in random_pfn_u32_max(), mut input2 in random_pfn_100())| {
// The uncompactor will output sorted elements.
prop_assert!(
uncompact(compact_page_frame_numbers(input1.as_mut_slice()))
== sort_and_dedup(input1.as_slice())
);
// Input2 will ensure duplicate PFN cases are also covered.
prop_assert!(
uncompact(compact_page_frame_numbers(input2.as_mut_slice()))
== sort_and_dedup(input2.as_slice())
);
});
}
}
|
{
let ret = unsafe {
libc::mmap(
phys_address as *mut _,
range_len as usize,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_FIXED | libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
-1,
0,
)
};
if ret == libc::MAP_FAILED {
return Err(RemoveRegionError::MmapFail(io::Error::last_os_error()));
}
}
|
conditional_block
|
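remove_range relies on madvise(MADV_DONTNEED) to drop the backing pages of an anonymous private mapping. A minimal Linux-only sketch of that effect, assuming the libc crate as a dependency, could look like:
// Linux-only sketch, assuming the libc crate: after madvise(MADV_DONTNEED),
// an anonymous private page reads back as zeros.
fn main() {
    let page = 4096usize;
    unsafe {
        let p = libc::mmap(
            std::ptr::null_mut(),
            page,
            libc::PROT_READ | libc::PROT_WRITE,
            libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
            -1,
            0,
        );
        assert_ne!(p, libc::MAP_FAILED);
        // Dirty the page, then advise the kernel to drop its backing store.
        std::ptr::write_bytes(p as *mut u8, 1u8, page);
        assert_eq!(*(p as *const u8), 1);
        assert_eq!(libc::madvise(p, page, libc::MADV_DONTNEED), 0);
        // The next read faults in a fresh zero page.
        assert_eq!(*(p as *const u8), 0);
        libc::munmap(p, page);
    }
}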
utils.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use std::io;
use super::{RemoveRegionError, MAX_PAGE_COMPACT_BUFFER};
use logger::error;
use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap, GuestMemoryRegion};
/// This takes a vector of page frame numbers, and compacts them
/// into ranges of consecutive pages. The result is a vector
/// of (start_page_frame_number, range_length) pairs.
pub(crate) fn compact_page_frame_numbers(v: &mut [u32]) -> Vec<(u32, u32)> {
if v.is_empty() {
return vec![];
}
// Since the total number of pages that can be
// received at once is `MAX_PAGE_COMPACT_BUFFER`,
// this sort does not change the complexity of handling
// an inflation.
v.sort_unstable();
// Since there are at most `MAX_PAGE_COMPACT_BUFFER` pages, setting the
// capacity of `result` to this makes sense.
let mut result = Vec::with_capacity(MAX_PAGE_COMPACT_BUFFER);
// The most recent range of pages is [previous..previous + length).
let mut previous = 0;
let mut length = 1;
for pfn_index in 1..v.len() {
let page_frame_number = v[pfn_index];
// Skip duplicate pages. This will ensure we only consider
// distinct PFNs.
if page_frame_number == v[pfn_index - 1] {
error!("Skipping duplicate PFN {}.", page_frame_number);
continue;
}
// Check if the current page frame number is adjacent to the most recent page range.
// This operation will never overflow because for whatever value `v[previous]`
// has in the u32 range, we know there are at least `length` consecutive numbers
// greater than it in the array (the greatest so far being `page_frame_number`),
// since `v[previous]` is before all of them in the sorted array and `length`
// was incremented for each consecutive one. This is true only because we skip
// duplicates.
if page_frame_number == v[previous] + length {
// If so, extend that range.
length += 1;
} else {
// Otherwise, push (previous, length) to the result vector.
result.push((v[previous], length));
// And update the most recent range of pages.
previous = pfn_index;
length = 1;
}
}
// Don't forget to push the last range to the result.
result.push((v[previous], length));
result
}
pub(crate) fn remove_range(
guest_memory: &GuestMemoryMmap,
range: (GuestAddress, u64),
restored: bool,
) -> std::result::Result<(), RemoveRegionError> {
let (guest_address, range_len) = range;
if let Some(region) = guest_memory.find_region(guest_address) {
if guest_address.0 + range_len > region.start_addr().0 + region.len() {
return Err(RemoveRegionError::MalformedRange);
}
let phys_address = guest_memory
.get_host_address(guest_address)
.map_err(|_| RemoveRegionError::AddressTranslation)?;
// Mmap a new anonymous region over the present one in order to create a hole.
// This workaround is (only) needed after resuming from a snapshot because the guest memory
// is mmaped from file as private and there is no `madvise` flag that works for this case.
if restored {
let ret = unsafe {
libc::mmap(
phys_address as *mut _,
range_len as usize,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_FIXED | libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
-1,
0,
)
};
if ret == libc::MAP_FAILED {
return Err(RemoveRegionError::MmapFail(io::Error::last_os_error()));
}
};
// Madvise the region in order to mark it as not used.
let ret = unsafe {
libc::madvise(
phys_address as *mut _,
range_len as usize,
libc::MADV_DONTNEED,
)
};
if ret < 0 {
return Err(RemoveRegionError::MadviseFail(io::Error::last_os_error()));
}
Ok(())
} else {
Err(RemoveRegionError::RegionNotFound)
}
}
#[cfg(test)]
mod tests {
use super::*;
use vm_memory::Bytes;
/// This asserts that $lhs matches $rhs.
macro_rules! assert_match {
($lhs:expr, $rhs:pat) => {{
assert!(matches!($lhs, $rhs))
}};
}
#[test]
fn test_compact_page_indices() {
// Test empty input.
assert!(compact_page_frame_numbers(&mut []).is_empty());
// Test single compact range.
assert_eq!(
compact_page_frame_numbers(&mut (0_u32..100_u32).collect::<Vec<u32>>().as_mut_slice()),
vec![(0, 100)]
);
// `compact_page_frame_numbers` works even when given out of order input.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..100_u32).rev().collect::<Vec<u32>>().as_mut_slice()
),
vec![(0, 100)]
);
// Test with 100 distinct ranges.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32)
.step_by(100)
.flat_map(|x| (x..x + 10).rev())
.collect::<Vec<u32>>()
),
(0_u32..10000_u32)
.step_by(100)
.map(|x| (x, 10_u32))
.collect::<Vec<(u32, u32)>>()
);
// Test range with duplicates.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32).map(|x| x / 2).collect::<Vec<u32>>()
),
vec![(0, 5000)]
);
// Test there is no overflow when there are duplicate max values.
assert_eq!(
compact_page_frame_numbers(&mut [u32::MAX, u32::MAX]),
vec![(u32::MAX, 1)]
);
}
#[test]
fn test_remove_range() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), false).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), false).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), false).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Madvise fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), false).unwrap_err(),
RemoveRegionError::MadviseFail(_)
);
}
#[test]
fn
|
() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), true).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), true).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), true).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Mmap fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), true).unwrap_err(),
RemoveRegionError::MmapFail(_)
);
}
/// -------------------------------------
/// BEGIN PROPERTY BASED TESTING
use proptest::prelude::*;
fn random_pfn_u32_max() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER elements) filled with random u32 elements.
prop::collection::vec(0..std::u32::MAX, 0..MAX_PAGE_COMPACT_BUFFER)
}
fn random_pfn_100() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER/8) filled with random u32 elements (0 - 100).
prop::collection::vec(0..100u32, 0..MAX_PAGE_COMPACT_BUFFER / 8)
}
    // The uncompactor will output deduplicated and sorted elements, as the
    // compaction algorithm guarantees.
fn uncompact(compacted: Vec<(u32, u32)>) -> Vec<u32> {
let mut result = Vec::new();
for (start, len) in compacted {
result.extend(start..start + len);
}
result
}
fn sort_and_dedup<T: Ord + Clone>(v: &[T]) -> Vec<T> {
let mut sorted_v = v.to_vec();
sorted_v.sort_unstable();
sorted_v.dedup();
sorted_v
}
// The below prop tests will validate the following output properties:
// - vec elements are sorted by first tuple value
// - no pfn duplicates are present
// - no pfn is lost
#[test]
fn test_pfn_compact() {
let cfg = ProptestConfig::with_cases(1500);
proptest!(cfg, |(mut input1 in random_pfn_u32_max(), mut input2 in random_pfn_100())| {
// The uncompactor will output sorted elements.
prop_assert!(
uncompact(compact_page_frame_numbers(input1.as_mut_slice()))
== sort_and_dedup(input1.as_slice())
);
// Input2 will ensure duplicate PFN cases are also covered.
prop_assert!(
uncompact(compact_page_frame_numbers(input2.as_mut_slice()))
== sort_and_dedup(input2.as_slice())
);
});
}
}
|
test_remove_range_on_restored
|
identifier_name
|
utils.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use std::io;
use super::{RemoveRegionError, MAX_PAGE_COMPACT_BUFFER};
use logger::error;
use vm_memory::{GuestAddress, GuestMemory, GuestMemoryMmap, GuestMemoryRegion};
/// This takes a vector of page frame numbers, and compacts them
/// into ranges of consecutive pages. The result is a vector
/// of (start_page_frame_number, range_length) pairs.
pub(crate) fn compact_page_frame_numbers(v: &mut [u32]) -> Vec<(u32, u32)> {
if v.is_empty() {
return vec![];
}
// Since the total number of pages that can be
// received at once is `MAX_PAGE_COMPACT_BUFFER`,
// this sort does not change the complexity of handling
// an inflation.
v.sort_unstable();
// Since there are at most `MAX_PAGE_COMPACT_BUFFER` pages, setting the
// capacity of `result` to this makes sense.
let mut result = Vec::with_capacity(MAX_PAGE_COMPACT_BUFFER);
// The most recent range of pages is [previous..previous + length).
let mut previous = 0;
let mut length = 1;
for pfn_index in 1..v.len() {
let page_frame_number = v[pfn_index];
// Skip duplicate pages. This will ensure we only consider
// distinct PFNs.
if page_frame_number == v[pfn_index - 1] {
error!("Skipping duplicate PFN {}.", page_frame_number);
continue;
}
// Check if the current page frame number is adjacent to the most recent page range.
// This operation will never overflow because for whatever value `v[previous]`
// has in the u32 range, we know there are at least `length` consecutive numbers
// greater than it in the array (the greatest so far being `page_frame_number`),
// since `v[previous]` is before all of them in the sorted array and `length`
// was incremented for each consecutive one. This is true only because we skip
// duplicates.
if page_frame_number == v[previous] + length {
// If so, extend that range.
length += 1;
} else {
// Otherwise, push (previous, length) to the result vector.
result.push((v[previous], length));
// And update the most recent range of pages.
previous = pfn_index;
length = 1;
}
}
// Don't forget to push the last range to the result.
result.push((v[previous], length));
result
}
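// A hedged, illustrative check added here for clarity (not part of the original
// source); it exercises the run-merging and duplicate-skipping behaviour
// documented above.
#[cfg(test)]
mod compaction_example {
    use super::compact_page_frame_numbers;

    #[test]
    fn merges_runs_and_skips_duplicates() {
        // Sorted, the input reads [4, 5, 6, 10, 10, 11]: the run [4, 5, 6]
        // compacts to (4, 3), the duplicate 10 is dropped, and [10, 11]
        // compacts to (10, 2).
        let mut pfns = vec![6u32, 4, 10, 5, 10, 11];
        assert_eq!(
            compact_page_frame_numbers(pfns.as_mut_slice()),
            vec![(4, 3), (10, 2)]
        );
    }
}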
pub(crate) fn remove_range(
guest_memory: &GuestMemoryMmap,
range: (GuestAddress, u64),
restored: bool,
) -> std::result::Result<(), RemoveRegionError> {
let (guest_address, range_len) = range;
if let Some(region) = guest_memory.find_region(guest_address) {
if guest_address.0 + range_len > region.start_addr().0 + region.len() {
return Err(RemoveRegionError::MalformedRange);
}
let phys_address = guest_memory
.get_host_address(guest_address)
.map_err(|_| RemoveRegionError::AddressTranslation)?;
// Mmap a new anonymous region over the present one in order to create a hole.
// This workaround is (only) needed after resuming from a snapshot because the guest memory
// is mmaped from file as private and there is no `madvise` flag that works for this case.
if restored {
let ret = unsafe {
libc::mmap(
phys_address as *mut _,
range_len as usize,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_FIXED | libc::MAP_ANONYMOUS | libc::MAP_PRIVATE,
-1,
0,
)
};
if ret == libc::MAP_FAILED {
return Err(RemoveRegionError::MmapFail(io::Error::last_os_error()));
}
};
// Madvise the region in order to mark it as not used.
let ret = unsafe {
libc::madvise(
phys_address as *mut _,
range_len as usize,
libc::MADV_DONTNEED,
)
};
if ret < 0 {
return Err(RemoveRegionError::MadviseFail(io::Error::last_os_error()));
}
Ok(())
} else {
Err(RemoveRegionError::RegionNotFound)
}
}
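// Hedged usage sketch (added note, not in the original source): assuming the
// guest address below lies inside a mapped region, a balloon inflate of one
// 4 KiB page on a freshly booted VM would call
// `remove_range(&guest_memory, (GuestAddress(0x3000), 4096), false)`, which
// only madvises the range away; after a snapshot restore the same call is made
// with `restored == true`, so an anonymous mapping is first punched over the
// range before the madvise.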
#[cfg(test)]
mod tests {
use super::*;
use vm_memory::Bytes;
/// This asserts that $lhs matches $rhs.
macro_rules! assert_match {
($lhs:expr, $rhs:pat) => {{
assert!(matches!($lhs, $rhs))
}};
}
#[test]
fn test_compact_page_indices() {
// Test empty input.
assert!(compact_page_frame_numbers(&mut []).is_empty());
// Test single compact range.
assert_eq!(
compact_page_frame_numbers(&mut (0_u32..100_u32).collect::<Vec<u32>>().as_mut_slice()),
vec![(0, 100)]
);
// `compact_page_frame_numbers` works even when given out of order input.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..100_u32).rev().collect::<Vec<u32>>().as_mut_slice()
),
vec![(0, 100)]
);
// Test with 100 distinct ranges.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32)
.step_by(100)
.flat_map(|x| (x..x + 10).rev())
.collect::<Vec<u32>>()
),
(0_u32..10000_u32)
.step_by(100)
.map(|x| (x, 10_u32))
.collect::<Vec<(u32, u32)>>()
);
// Test range with duplicates.
assert_eq!(
compact_page_frame_numbers(
&mut (0_u32..10000_u32).map(|x| x / 2).collect::<Vec<u32>>()
),
vec![(0, 5000)]
);
// Test there is no overflow when there are duplicate max values.
assert_eq!(
compact_page_frame_numbers(&mut [u32::MAX, u32::MAX]),
vec![(u32::MAX, 1)]
);
}
#[test]
fn test_remove_range() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), false).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), false).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), false).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Madvise fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), false).unwrap_err(),
RemoveRegionError::MadviseFail(_)
);
}
#[test]
fn test_remove_range_on_restored() {
let page_size: usize = 0x1000;
let mem = vm_memory::test_utils::create_anon_guest_memory(
&[(GuestAddress(0), 2 * page_size)],
false,
)
.unwrap();
// Fill the memory with ones.
let ones = vec![1u8; 2 * page_size];
mem.write(&ones[..], GuestAddress(0)).unwrap();
// Remove the first page.
assert!(remove_range(&mem, (GuestAddress(0), page_size as u64), true).is_ok());
// Check that the first page is zeroed.
let mut actual_page = vec![0u8; page_size];
mem.read(&mut actual_page.as_mut_slice(), GuestAddress(0))
.unwrap();
assert_eq!(vec![0u8; page_size], actual_page);
// Check that the second page still contains ones.
mem.read(
&mut actual_page.as_mut_slice(),
GuestAddress(page_size as u64),
)
.unwrap();
assert_eq!(vec![1u8; page_size], actual_page);
// Malformed range: the len is too big.
assert_match!(
remove_range(&mem, (GuestAddress(0), 0x10000), true).unwrap_err(),
RemoveRegionError::MalformedRange
);
// Region not mapped.
assert_match!(
remove_range(&mem, (GuestAddress(0x10000), 0x10), true).unwrap_err(),
RemoveRegionError::RegionNotFound
);
// Mmap fail: the guest address is not aligned to the page size.
assert_match!(
remove_range(&mem, (GuestAddress(0x20), page_size as u64), true).unwrap_err(),
RemoveRegionError::MmapFail(_)
);
}
/// -------------------------------------
/// BEGIN PROPERTY BASED TESTING
use proptest::prelude::*;
fn random_pfn_u32_max() -> impl Strategy<Value = Vec<u32>>
|
fn random_pfn_100() -> impl Strategy<Value = Vec<u32>> {
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER/8) filled with random u32 elements (0 - 100).
prop::collection::vec(0..100u32, 0..MAX_PAGE_COMPACT_BUFFER / 8)
}
// The uncompactor will output deduplicated and sorted elements, because the
// compaction algorithm guarantees this.
fn uncompact(compacted: Vec<(u32, u32)>) -> Vec<u32> {
let mut result = Vec::new();
for (start, len) in compacted {
result.extend(start..start + len);
}
result
}
fn sort_and_dedup<T: Ord + Clone>(v: &[T]) -> Vec<T> {
let mut sorted_v = v.to_vec();
sorted_v.sort_unstable();
sorted_v.dedup();
sorted_v
}
// The below prop tests will validate the following output properties:
// - vec elements are sorted by first tuple value
// - no pfn duplicates are present
// - no pfn is lost
#[test]
fn test_pfn_compact() {
let cfg = ProptestConfig::with_cases(1500);
proptest!(cfg, |(mut input1 in random_pfn_u32_max(), mut input2 in random_pfn_100())| {
// The uncompactor will output sorted elements.
prop_assert!(
uncompact(compact_page_frame_numbers(input1.as_mut_slice()))
== sort_and_dedup(input1.as_slice())
);
// Input2 will ensure duplicate PFN cases are also covered.
prop_assert!(
uncompact(compact_page_frame_numbers(input2.as_mut_slice()))
== sort_and_dedup(input2.as_slice())
);
});
}
}
|
{
// Create a randomly sized vec (max MAX_PAGE_COMPACT_BUFFER elements) filled with random u32 elements.
prop::collection::vec(0..std::u32::MAX, 0..MAX_PAGE_COMPACT_BUFFER)
}
|
identifier_body
|
stability.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A pass that annotates every item and method with its stability level,
//! propagating default levels lexically from parent to children ast nodes.
use util::nodemap::{NodeMap, DefIdMap};
use syntax::codemap::Span;
use syntax::{attr, visit};
use syntax::ast;
use syntax::ast::{Attribute, Block, Crate, DefId, FnDecl, NodeId, Variant};
use syntax::ast::{Item, RequiredMethod, ProvidedMethod, TraitItem};
use syntax::ast::{TypeMethod, Method, Generics, StructField, TypeTraitItem};
use syntax::ast_util::is_local;
use syntax::attr::Stability;
use syntax::visit::{FnKind, FkMethod, Visitor};
use middle::ty;
use metadata::csearch;
use std::mem::replace;
/// A stability index, giving the stability level for items and methods.
pub struct Index {
// stability for crate-local items; unmarked stability == no entry
local: NodeMap<Stability>,
// cache for extern-crate items; unmarked stability == entry with None
extern_cache: DefIdMap<Option<Stability>>
}
// A private tree-walker for producing an Index.
struct Annotator {
index: Index,
parent: Option<Stability>
}
impl Annotator {
// Determine the stability for a node based on its attributes and inherited
// stability. The stability is recorded in the index and used as the parent.
fn annotate(&mut self, id: NodeId, attrs: &Vec<Attribute>, f: |&mut Annotator|) {
match attr::find_stability(attrs.as_slice()) {
Some(stab) => {
self.index.local.insert(id, stab.clone());
// Don't inherit #[stable]
if stab.level != attr::Stable {
let parent = replace(&mut self.parent, Some(stab));
f(self);
self.parent = parent;
} else {
f(self);
}
}
None => {
self.parent.clone().map(|stab| self.index.local.insert(id, stab));
f(self);
}
}
}
}
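// Illustrative sketch (added note, not part of the original pass): given
//
//     #[unstable]
//     mod m {
//         fn f() {}   // no stability attribute of its own
//     }
//
// at the crate root, `annotate` records `m` as unstable and, because only
// non-#[stable] levels are pushed as the parent, `f` inherits that unstable
// entry via the `None` arm above; had `m` been #[stable] (with nothing
// enclosing it), `f` would receive no entry at all, since #[stable] is
// deliberately not propagated to children.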
impl<'v> Visitor<'v> for Annotator {
fn visit_item(&mut self, i: &Item) {
self.annotate(i.id, &i.attrs, |v| visit::walk_item(v, i));
match i.node {
ast::ItemStruct(ref sd, _) => {
sd.ctor_id.map(|id| {
self.annotate(id, &i.attrs, |_| {})
});
}
_ => {}
}
}
fn visit_fn(&mut self, fk: FnKind<'v>, _: &'v FnDecl,
_: &'v Block, _: Span, _: NodeId) {
match fk {
FkMethod(_, _, meth) => {
// Methods are not already annotated, so we annotate them here.
self.annotate(meth.id, &meth.attrs, |_| {});
}
_ => {}
}
// Items defined in a function body have no reason to have
// a stability attribute, so we don't recurse.
}
fn visit_trait_item(&mut self, t: &TraitItem) {
let (id, attrs) = match *t {
RequiredMethod(TypeMethod {id, ref attrs,..}) => (id, attrs),
// work around lack of pattern matching for @ types
ProvidedMethod(ref method) => {
match **method {
Method {ref attrs, id,..} => (id, attrs),
}
}
TypeTraitItem(ref typedef) => (typedef.ty_param.id, &typedef.attrs),
};
self.annotate(id, attrs, |v| visit::walk_trait_item(v, t));
}
fn
|
(&mut self, var: &Variant, g: &'v Generics) {
self.annotate(var.node.id, &var.node.attrs, |v| visit::walk_variant(v, var, g))
}
fn visit_struct_field(&mut self, s: &StructField) {
self.annotate(s.node.id, &s.node.attrs, |v| visit::walk_struct_field(v, s));
}
}
impl Index {
/// Construct the stability index for a crate being compiled.
pub fn build(krate: &Crate) -> Index {
let mut annotator = Annotator {
index: Index {
local: NodeMap::new(),
extern_cache: DefIdMap::new()
},
parent: None
};
annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, |v| visit::walk_crate(v, krate));
annotator.index
}
}
/// Lookup the stability for a node, loading external crate
/// metadata as necessary.
pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option<Stability> {
// is this definition the implementation of a trait method?
match ty::trait_item_of_item(tcx, id) {
Some(ty::MethodTraitItemId(trait_method_id))
if trait_method_id != id => {
lookup(tcx, trait_method_id)
}
_ if is_local(id) => {
tcx.stability.borrow().local.get(&id.node).cloned()
}
_ => {
let stab = csearch::get_stability(&tcx.sess.cstore, id);
let mut index = tcx.stability.borrow_mut();
(*index).extern_cache.insert(id, stab.clone());
stab
}
}
}
|
visit_variant
|
identifier_name
|
stability.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A pass that annotates every item and method with its stability level,
//! propagating default levels lexically from parent to children ast nodes.
use util::nodemap::{NodeMap, DefIdMap};
use syntax::codemap::Span;
use syntax::{attr, visit};
use syntax::ast;
use syntax::ast::{Attribute, Block, Crate, DefId, FnDecl, NodeId, Variant};
use syntax::ast::{Item, RequiredMethod, ProvidedMethod, TraitItem};
use syntax::ast::{TypeMethod, Method, Generics, StructField, TypeTraitItem};
use syntax::ast_util::is_local;
use syntax::attr::Stability;
use syntax::visit::{FnKind, FkMethod, Visitor};
use middle::ty;
use metadata::csearch;
use std::mem::replace;
/// A stability index, giving the stability level for items and methods.
pub struct Index {
// stability for crate-local items; unmarked stability == no entry
local: NodeMap<Stability>,
// cache for extern-crate items; unmarked stability == entry with None
extern_cache: DefIdMap<Option<Stability>>
}
// A private tree-walker for producing an Index.
struct Annotator {
index: Index,
parent: Option<Stability>
}
impl Annotator {
// Determine the stability for a node based on its attributes and inherited
// stability. The stability is recorded in the index and used as the parent.
fn annotate(&mut self, id: NodeId, attrs: &Vec<Attribute>, f: |&mut Annotator|) {
match attr::find_stability(attrs.as_slice()) {
Some(stab) => {
self.index.local.insert(id, stab.clone());
// Don't inherit #[stable]
if stab.level != attr::Stable {
let parent = replace(&mut self.parent, Some(stab));
f(self);
self.parent = parent;
} else {
f(self);
}
}
None => {
self.parent.clone().map(|stab| self.index.local.insert(id, stab));
f(self);
}
}
}
}
impl<'v> Visitor<'v> for Annotator {
fn visit_item(&mut self, i: &Item) {
self.annotate(i.id, &i.attrs, |v| visit::walk_item(v, i));
match i.node {
ast::ItemStruct(ref sd, _) => {
sd.ctor_id.map(|id| {
self.annotate(id, &i.attrs, |_| {})
});
}
_ => {}
}
}
fn visit_fn(&mut self, fk: FnKind<'v>, _: &'v FnDecl,
_: &'v Block, _: Span, _: NodeId) {
match fk {
FkMethod(_, _, meth) => {
// Methods are not already annotated, so we annotate them here.
self.annotate(meth.id, &meth.attrs, |_| {});
}
_ => {}
}
// Items defined in a function body have no reason to have
// a stability attribute, so we don't recurse.
}
fn visit_trait_item(&mut self, t: &TraitItem) {
let (id, attrs) = match *t {
RequiredMethod(TypeMethod {id, ref attrs,..}) => (id, attrs),
// work around lack of pattern matching for @ types
ProvidedMethod(ref method) => {
match **method {
Method {ref attrs, id,..} => (id, attrs),
}
}
TypeTraitItem(ref typedef) => (typedef.ty_param.id, &typedef.attrs),
};
self.annotate(id, attrs, |v| visit::walk_trait_item(v, t));
}
fn visit_variant(&mut self, var: &Variant, g: &'v Generics) {
self.annotate(var.node.id, &var.node.attrs, |v| visit::walk_variant(v, var, g))
}
fn visit_struct_field(&mut self, s: &StructField) {
self.annotate(s.node.id, &s.node.attrs, |v| visit::walk_struct_field(v, s));
}
}
impl Index {
/// Construct the stability index for a crate being compiled.
pub fn build(krate: &Crate) -> Index {
let mut annotator = Annotator {
index: Index {
local: NodeMap::new(),
extern_cache: DefIdMap::new()
},
parent: None
};
annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, |v| visit::walk_crate(v, krate));
annotator.index
}
}
/// Lookup the stability for a node, loading external crate
|
pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option<Stability> {
// is this definition the implementation of a trait method?
match ty::trait_item_of_item(tcx, id) {
Some(ty::MethodTraitItemId(trait_method_id))
if trait_method_id != id => {
lookup(tcx, trait_method_id)
}
_ if is_local(id) => {
tcx.stability.borrow().local.get(&id.node).cloned()
}
_ => {
let stab = csearch::get_stability(&tcx.sess.cstore, id);
let mut index = tcx.stability.borrow_mut();
(*index).extern_cache.insert(id, stab.clone());
stab
}
}
}
|
/// metadata as necessary.
|
random_line_split
|
stability.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A pass that annotates every item and method with its stability level,
//! propagating default levels lexically from parent to children ast nodes.
use util::nodemap::{NodeMap, DefIdMap};
use syntax::codemap::Span;
use syntax::{attr, visit};
use syntax::ast;
use syntax::ast::{Attribute, Block, Crate, DefId, FnDecl, NodeId, Variant};
use syntax::ast::{Item, RequiredMethod, ProvidedMethod, TraitItem};
use syntax::ast::{TypeMethod, Method, Generics, StructField, TypeTraitItem};
use syntax::ast_util::is_local;
use syntax::attr::Stability;
use syntax::visit::{FnKind, FkMethod, Visitor};
use middle::ty;
use metadata::csearch;
use std::mem::replace;
/// A stability index, giving the stability level for items and methods.
pub struct Index {
// stability for crate-local items; unmarked stability == no entry
local: NodeMap<Stability>,
// cache for extern-crate items; unmarked stability == entry with None
extern_cache: DefIdMap<Option<Stability>>
}
// A private tree-walker for producing an Index.
struct Annotator {
index: Index,
parent: Option<Stability>
}
impl Annotator {
// Determine the stability for a node based on its attributes and inherited
// stability. The stability is recorded in the index and used as the parent.
fn annotate(&mut self, id: NodeId, attrs: &Vec<Attribute>, f: |&mut Annotator|) {
match attr::find_stability(attrs.as_slice()) {
Some(stab) => {
self.index.local.insert(id, stab.clone());
// Don't inherit #[stable]
if stab.level != attr::Stable {
let parent = replace(&mut self.parent, Some(stab));
f(self);
self.parent = parent;
} else {
f(self);
}
}
None => {
self.parent.clone().map(|stab| self.index.local.insert(id, stab));
f(self);
}
}
}
}
impl<'v> Visitor<'v> for Annotator {
fn visit_item(&mut self, i: &Item) {
self.annotate(i.id, &i.attrs, |v| visit::walk_item(v, i));
match i.node {
ast::ItemStruct(ref sd, _) => {
sd.ctor_id.map(|id| {
self.annotate(id, &i.attrs, |_| {})
});
}
_ => {}
}
}
fn visit_fn(&mut self, fk: FnKind<'v>, _: &'v FnDecl,
_: &'v Block, _: Span, _: NodeId) {
match fk {
FkMethod(_, _, meth) => {
// Methods are not already annotated, so we annotate them here.
self.annotate(meth.id, &meth.attrs, |_| {});
}
_ => {}
}
// Items defined in a function body have no reason to have
// a stability attribute, so we don't recurse.
}
fn visit_trait_item(&mut self, t: &TraitItem) {
let (id, attrs) = match *t {
RequiredMethod(TypeMethod {id, ref attrs,..}) => (id, attrs),
// work around lack of pattern matching for @ types
ProvidedMethod(ref method) => {
match **method {
Method {ref attrs, id,..} => (id, attrs),
}
}
TypeTraitItem(ref typedef) => (typedef.ty_param.id, &typedef.attrs),
};
self.annotate(id, attrs, |v| visit::walk_trait_item(v, t));
}
fn visit_variant(&mut self, var: &Variant, g: &'v Generics) {
self.annotate(var.node.id, &var.node.attrs, |v| visit::walk_variant(v, var, g))
}
fn visit_struct_field(&mut self, s: &StructField) {
self.annotate(s.node.id, &s.node.attrs, |v| visit::walk_struct_field(v, s));
}
}
impl Index {
/// Construct the stability index for a crate being compiled.
pub fn build(krate: &Crate) -> Index {
let mut annotator = Annotator {
index: Index {
local: NodeMap::new(),
extern_cache: DefIdMap::new()
},
parent: None
};
annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, |v| visit::walk_crate(v, krate));
annotator.index
}
}
/// Lookup the stability for a node, loading external crate
/// metadata as necessary.
pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option<Stability> {
// is this definition the implementation of a trait method?
match ty::trait_item_of_item(tcx, id) {
Some(ty::MethodTraitItemId(trait_method_id))
if trait_method_id != id => {
lookup(tcx, trait_method_id)
}
_ if is_local(id) => {
tcx.stability.borrow().local.get(&id.node).cloned()
}
_ =>
|
}
}
|
{
let stab = csearch::get_stability(&tcx.sess.cstore, id);
let mut index = tcx.stability.borrow_mut();
(*index).extern_cache.insert(id, stab.clone());
stab
}
|
conditional_block
|
stability.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A pass that annotates every item and method with its stability level,
//! propagating default levels lexically from parent to children ast nodes.
use util::nodemap::{NodeMap, DefIdMap};
use syntax::codemap::Span;
use syntax::{attr, visit};
use syntax::ast;
use syntax::ast::{Attribute, Block, Crate, DefId, FnDecl, NodeId, Variant};
use syntax::ast::{Item, RequiredMethod, ProvidedMethod, TraitItem};
use syntax::ast::{TypeMethod, Method, Generics, StructField, TypeTraitItem};
use syntax::ast_util::is_local;
use syntax::attr::Stability;
use syntax::visit::{FnKind, FkMethod, Visitor};
use middle::ty;
use metadata::csearch;
use std::mem::replace;
/// A stability index, giving the stability level for items and methods.
pub struct Index {
// stability for crate-local items; unmarked stability == no entry
local: NodeMap<Stability>,
// cache for extern-crate items; unmarked stability == entry with None
extern_cache: DefIdMap<Option<Stability>>
}
// A private tree-walker for producing an Index.
struct Annotator {
index: Index,
parent: Option<Stability>
}
impl Annotator {
// Determine the stability for a node based on its attributes and inherited
// stability. The stability is recorded in the index and used as the parent.
fn annotate(&mut self, id: NodeId, attrs: &Vec<Attribute>, f: |&mut Annotator|) {
match attr::find_stability(attrs.as_slice()) {
Some(stab) => {
self.index.local.insert(id, stab.clone());
// Don't inherit #[stable]
if stab.level != attr::Stable {
let parent = replace(&mut self.parent, Some(stab));
f(self);
self.parent = parent;
} else {
f(self);
}
}
None => {
self.parent.clone().map(|stab| self.index.local.insert(id, stab));
f(self);
}
}
}
}
impl<'v> Visitor<'v> for Annotator {
fn visit_item(&mut self, i: &Item) {
self.annotate(i.id, &i.attrs, |v| visit::walk_item(v, i));
match i.node {
ast::ItemStruct(ref sd, _) => {
sd.ctor_id.map(|id| {
self.annotate(id, &i.attrs, |_| {})
});
}
_ => {}
}
}
fn visit_fn(&mut self, fk: FnKind<'v>, _: &'v FnDecl,
_: &'v Block, _: Span, _: NodeId) {
match fk {
FkMethod(_, _, meth) => {
// Methods are not already annotated, so we annotate them here.
self.annotate(meth.id, &meth.attrs, |_| {});
}
_ => {}
}
// Items defined in a function body have no reason to have
// a stability attribute, so we don't recurse.
}
fn visit_trait_item(&mut self, t: &TraitItem) {
let (id, attrs) = match *t {
RequiredMethod(TypeMethod {id, ref attrs,..}) => (id, attrs),
// work around lack of pattern matching for @ types
ProvidedMethod(ref method) => {
match **method {
Method {ref attrs, id,..} => (id, attrs),
}
}
TypeTraitItem(ref typedef) => (typedef.ty_param.id, &typedef.attrs),
};
self.annotate(id, attrs, |v| visit::walk_trait_item(v, t));
}
fn visit_variant(&mut self, var: &Variant, g: &'v Generics) {
self.annotate(var.node.id, &var.node.attrs, |v| visit::walk_variant(v, var, g))
}
fn visit_struct_field(&mut self, s: &StructField)
|
}
impl Index {
/// Construct the stability index for a crate being compiled.
pub fn build(krate: &Crate) -> Index {
let mut annotator = Annotator {
index: Index {
local: NodeMap::new(),
extern_cache: DefIdMap::new()
},
parent: None
};
annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, |v| visit::walk_crate(v, krate));
annotator.index
}
}
/// Lookup the stability for a node, loading external crate
/// metadata as necessary.
pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option<Stability> {
// is this definition the implementation of a trait method?
match ty::trait_item_of_item(tcx, id) {
Some(ty::MethodTraitItemId(trait_method_id))
if trait_method_id != id => {
lookup(tcx, trait_method_id)
}
_ if is_local(id) => {
tcx.stability.borrow().local.get(&id.node).cloned()
}
_ => {
let stab = csearch::get_stability(&tcx.sess.cstore, id);
let mut index = tcx.stability.borrow_mut();
(*index).extern_cache.insert(id, stab.clone());
stab
}
}
}
|
{
self.annotate(s.node.id, &s.node.attrs, |v| visit::walk_struct_field(v, s));
}
|
identifier_body
|
build.rs
|
#[macro_use]
extern crate quick_error;
extern crate chrono;
use std::env;
use chrono::*;
use std::convert::AsRef;
use std::fs::{File, create_dir_all};
use std::io::{Write, Read, BufWriter};
use std::path::{Path};
use std::process::{Command, Output};
use std::collections::HashMap;
quick_error! {
#[derive(Debug)]
pub enum Error {
Io(err: std::io::Error) {
from()
}
MissingEnvVar {
}
CommandFailed(err: Output){
from()
}
CommandEmptyOutput(err: String){
from()
}
}
}
pub enum EnvTidbit{
Env(&'static str),
EnvReq(&'static str),
Cmd{key: &'static str, cmd: &'static str},
CmdReq{key: &'static str, cmd: &'static str},
CmdOrEnvReq{key: &'static str, cmd: &'static str},
CmdOrEnv{key: &'static str, cmd: &'static str},
EnvOrCmdInconsistent{key: &'static str, cmd: &'static str},
FileContentsReq{key: &'static str, relative_to_build_rs: &'static str}
}
fn run(cmd: &str) -> std::result::Result<String,Error>
|
}
}
fn fetch_env(key: &str, result_required: bool, empty_is_missing: bool) -> Option<String>{
if result_required {
match env::var(key) {
Ok(ref v) if v.len() == 0 && empty_is_missing => {
panic!("Required env var {} is present - but empty - in the build environment", key);
},
Ok(v) => Some(v),
Err(e) => { panic!("Required env var {} missing in the build environment: {:?}", key, e); }
}
}else{
env::var(key).ok().and_then(|v| if v.len() == 0 && empty_is_missing { None } else { Some(v) })
}
}
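// Hedged example (added note): with `empty_is_missing == true`, an exported but
// empty $CI_TAG counts as absent — `fetch_env("CI_TAG", false, true)` returns
// `None`, while `fetch_env("CI_TAG", true, true)` panics because the required
// variable is present but empty.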
fn command(key: &str, cmd: &str, result_required: bool, fallback_to_env: bool) -> Option<String>{
//Panic only if non-UTF-8 output is sent
let output = run(cmd);
//Don't panic when fetching env var
let env_val = match fallback_to_env { true => fetch_env(key, false, true), false => None};
//Ensure consistency if both are present
if let Ok(ref out_str) = output {
if let Some(ref env_str) = env_val {
if out_str != env_str {
if out_str.trim() != env_str.trim() {
panic!("Inconsistent values for {} and {}.\nCommand output: {}\nEnv var: {}", key, cmd, out_str, env_str);
}
}
}
}
if result_required && output.is_err() && env_val.is_none() {
if fallback_to_env {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}, and ENV var {} was missing or empty.",
key, cmd, output, key);
} else {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}. ENV var not consulted.",
key, cmd, output);
}
}else {
output.ok().or(env_val)
}
}
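// Hedged example (added note): for key = "GIT_COMMIT" and cmd = "git rev-parse
// HEAD" (the CmdOrEnvReq case below, i.e. result_required and fallback_to_env
// both true), a successful command wins outright; if the command fails but
// $GIT_COMMIT is set, the env value is used; if both are present but their
// trimmed values disagree, the build panics rather than guessing; if both are
// missing, the build panics because the value is required.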
fn env_or_cmd(key: &str, cmd: &str) -> Option<String>{
fetch_env(key, false, true).or(run(cmd).ok())
}
//
//fn get_repo_root() -> PathBuf{
// let build_rs_path = file!();
// Path::new(&build_rs_path).parent().expect("Rust must be stripping parent directory info from file! macro. This breaks path stuff in build.rs.").to_owned()
//}
fn collect_info(shopping_list: Vec<EnvTidbit>) -> HashMap<String, Option<String>>{
let mut info = HashMap::new();
for from in shopping_list {
let (k,v) = match from {
EnvTidbit::Env(key) => (key, fetch_env(key, false, true)),
EnvTidbit::EnvReq(key) => (key, fetch_env(key, true, true)),
EnvTidbit::FileContentsReq{key, relative_to_build_rs} => {
let io_error_expect = format!("Failed to read file {:?}. This file is required to be embedded in output binaries.", relative_to_build_rs);
let mut file = File::open(relative_to_build_rs).expect(&io_error_expect);
let mut contents = String::new();
file.read_to_string( &mut contents).expect(&io_error_expect);
(key, Some(contents))
},
EnvTidbit::Cmd{key, cmd} => (key, command(key, cmd, false, false)),
EnvTidbit::CmdReq{key, cmd} => (key, command(key, cmd, true, false)),
EnvTidbit::CmdOrEnvReq{key, cmd} => (key, command(key, cmd, true, true)),
EnvTidbit::CmdOrEnv{key, cmd} => (key, command(key, cmd, false, true)),
EnvTidbit::EnvOrCmdInconsistent{key, cmd} => (key, env_or_cmd(key, cmd)),
};
info.insert(k.to_owned(),v);
}
info
}
fn what_to_collect() -> Vec<EnvTidbit>{
let mut c = Vec::new();
c.push(EnvTidbit::CmdOrEnvReq {key: "GIT_COMMIT", cmd: "git rev-parse HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_COMMIT_SHORT", cmd: "git rev-parse --short HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALWAYS", cmd: "git describe --always --tags"});
c.push(EnvTidbit::CmdOrEnvReq{key: "GIT_DESCRIBE_ALWAYS_LONG", cmd: "git describe --always --tags --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALL", cmd: "git describe --always --all --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_TAG", cmd: "git describe --exact-match --tags"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_BRANCH", cmd: "git symbolic-ref --short HEAD"});
static ENV_VARS: [&'static str;21] = ["ESTIMATED_ARTIFACT_URL","ESTIMATED_DOCS_URL","CI_SEQUENTIAL_BUILD_NUMBER","CI_BUILD_URL","CI_JOB_URL","CI_JOB_TITLE","CI_STRING",
"CI_PULL_REQUEST_INFO", "CI_TAG", "CI_REPO", "CI_RELATED_BRANCH", "CI", "TARGET", "OUT_DIR", "HOST", "OPT_LEVEL", "DEBUG", "PROFILE", "RUSTC", "RUSTFLAGS","TARGET_CPU"
];
for name in ENV_VARS.iter(){
c.push(EnvTidbit::Env(name));
}
c.push(EnvTidbit::EnvReq("CARGO_MANIFEST_DIR"));
c.push(EnvTidbit::Cmd{key: "GIT_STATUS", cmd: "git checkout../c_components/tests/visuals/weights.txt && git status"});
c.push(EnvTidbit::Cmd{key: "GLIBC_VERSION", cmd: "ldd --version"});
c.push(EnvTidbit::Cmd{key: "UNAME", cmd: "uname -av"});
c.push(EnvTidbit::Cmd{key: "WIN_SYSTEMINFO", cmd: "systeminfo.exe"});
//TODO: ver?
c.push(EnvTidbit::Cmd{key: "DEFAULT_GCC_VERSION", cmd: "gcc -v"});
c.push(EnvTidbit::Cmd{key: "DEFAULT_CLANG_VERSION", cmd: "clang --version"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_RUSTC_VERSION", cmd: "rustc -V"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_CARGO_VERSION", cmd: "cargo -V"});
c
}
fn write_file(name: &str, file_contents: String) -> std::result::Result<(), Error> {
let path = env::var_os("OUT_DIR").ok_or(Error::MissingEnvVar)?;
let path : &Path = path.as_ref();
create_dir_all(path)?;
let path = path.join(name);
let mut file = BufWriter::new(File::create(&path)?);
write!(file, "{}", file_contents)?;
Ok(())
}
fn main() {
let todo = what_to_collect();
let utcnow_val = Utc::now();
let mut results = collect_info(todo);
results.insert("GENERATED_DATETIME_UTC".to_owned(), Some(utcnow_val.to_rfc3339()));
results.insert("GENERATED_DATE_UTC".to_owned(), Some(utcnow_val.format("%Y-%m-%d").to_string()));
let mut contents = String::new();
contents += "use std::collections::HashMap;\n";
// contents += "#[macro_use]\nextern crate lazy_static;\n";
contents += "fn get_build_env_info() -> HashMap<&'static str, Option<&'static str>> {\n";
contents += " let mut i = HashMap::new();\n";
for (k, v) in &results{
let line = format!(" i.insert({:?}, {:?});\n", k,v);
contents += &line;
}
contents += " i\n}\nlazy_static! {\n pub static ref BUILD_ENV_INFO: HashMap<&'static str, Option<&'static str>> = ";
contents += "get_build_env_info();\n}\n";
//These vars are required for all builds
for name in ["GIT_COMMIT", "GIT_DESCRIBE_ALWAYS", "TARGET", "GENERATED_DATETIME_UTC", "GENERATED_DATE_UTC"].iter(){
let value = results.get::<str>(name).unwrap().to_owned().unwrap();
let line = format!("pub static {}: &'static str = {:?};\n", name,&value);
contents += &line;
}
let ci_value = results.get("CI").unwrap().to_owned().unwrap_or("false".to_owned()).to_lowercase();
let line = format!("pub static BUILT_ON_CI: bool = {};\n", ci_value);
contents += &line;
// let line = format!("pub static GENERATED_DATETIME_UTC: &'static str = {:?};\n", utcnow_val.to_rfc3339());
// contents += &line;
// let line = format!("pub static GENERATED_DATE_UTC: &'static str = {:?};\n", utcnow_val.format("%Y-%m-%d").to_string());
// contents += &line;
let _ = write_file("build_env_info.rs", contents ).expect("Saving git version");
}
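// Hedged sketch (added note) of the OUT_DIR/build_env_info.rs that main()
// generates, assuming a hypothetical commit hash "abc123" and no CI env:
//
//     use std::collections::HashMap;
//     fn get_build_env_info() -> HashMap<&'static str, Option<&'static str>> {
//         let mut i = HashMap::new();
//         i.insert("GIT_COMMIT", Some("abc123"));
//         // ... one insert per collected key ...
//         i
//     }
//     lazy_static! {
//         pub static ref BUILD_ENV_INFO: HashMap<&'static str, Option<&'static str>> = get_build_env_info();
//     }
//     pub static GIT_COMMIT: &'static str = "abc123";
//     pub static BUILT_ON_CI: bool = false;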
|
{
let mut args: Vec<&str> = cmd.split(" ").collect::<Vec<&str>>();
if args.len() < 1 {
panic!("");
}
let exe = args.remove(0);
let output: Output = Command::new(exe)
.args(&args)
.output()?;
if !output.status.success() {
return Err(Error::CommandFailed(output));
}
let utf8_msg = format!("Command produced invalid UTF-8 output: {}", cmd);
let str_out: &str = std::str::from_utf8(&output.stdout).expect(&utf8_msg);
if str_out.split_whitespace().count() > 0 {
Ok(str_out.trim().to_owned())
} else {
Err(Error::from(str_out.to_owned()))
|
identifier_body
|
build.rs
|
#[macro_use]
extern crate quick_error;
extern crate chrono;
use std::env;
use chrono::*;
use std::convert::AsRef;
use std::fs::{File, create_dir_all};
use std::io::{Write, Read, BufWriter};
use std::path::{Path};
use std::process::{Command, Output};
use std::collections::HashMap;
quick_error! {
#[derive(Debug)]
pub enum Error {
Io(err: std::io::Error) {
from()
}
MissingEnvVar {
}
CommandFailed(err: Output){
from()
}
CommandEmptyOutput(err: String){
from()
}
}
}
pub enum EnvTidbit{
Env(&'static str),
EnvReq(&'static str),
Cmd{key: &'static str, cmd: &'static str},
CmdReq{key: &'static str, cmd: &'static str},
CmdOrEnvReq{key: &'static str, cmd: &'static str},
CmdOrEnv{key: &'static str, cmd: &'static str},
EnvOrCmdInconsistent{key: &'static str, cmd: &'static str},
FileContentsReq{key: &'static str, relative_to_build_rs: &'static str}
}
fn run(cmd: &str) -> std::result::Result<String,Error> {
let mut args: Vec<&str> = cmd.split(" ").collect::<Vec<&str>>();
if args.len() < 1 {
panic!("");
}
let exe = args.remove(0);
let output: Output = Command::new(exe)
.args(&args)
.output()?;
if !output.status.success() {
return Err(Error::CommandFailed(output));
}
let utf8_msg = format!("Command produced invalid UTF-8 output: {}", cmd);
let str_out: &str = std::str::from_utf8(&output.stdout).expect(&utf8_msg);
if str_out.split_whitespace().count() > 0 {
Ok(str_out.trim().to_owned())
} else {
Err(Error::from(str_out.to_owned()))
}
}
fn fetch_env(key: &str, result_required: bool, empty_is_missing: bool) -> Option<String>{
if result_required {
match env::var(key) {
Ok(ref v) if v.len() == 0 && empty_is_missing => {
panic!("Required env var {} is present - but empty - in the build environment", key);
},
Ok(v) => Some(v),
Err(e) => { panic!("Required env var {} missing in the build environment: {:?}", key, e); }
}
}else{
env::var(key).ok().and_then(|v| if v.len() == 0 && empty_is_missing { None } else { Some(v) })
}
}
fn command(key: &str, cmd: &str, result_required: bool, fallback_to_env: bool) -> Option<String>{
//Panic only if non-UTF-8 output is sent
let output = run(cmd);
//Don't panic when fetching env var
let env_val = match fallback_to_env { true => fetch_env(key, false, true), false => None};
//Ensure consistency if both are present
if let Ok(ref out_str) = output {
if let Some(ref env_str) = env_val {
if out_str != env_str {
if out_str.trim() != env_str.trim() {
panic!("Inconsistent values for {} and {}.\nCommand output: {}\nEnv var: {}", key, cmd, out_str, env_str);
}
}
}
}
if result_required && output.is_err() && env_val.is_none() {
if fallback_to_env {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}, and ENV var {} was missing or empty.",
key, cmd, output, key);
} else {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}. ENV var not consulted.",
key, cmd, output);
}
}else {
output.ok().or(env_val)
}
}
fn env_or_cmd(key: &str, cmd: &str) -> Option<String>{
fetch_env(key, false, true).or(run(cmd).ok())
}
//
//fn get_repo_root() -> PathBuf{
// let build_rs_path = file!();
// Path::new(&build_rs_path).parent().expect("Rust must be stripping parent directory info from file! macro. This breaks path stuff in build.rs.").to_owned()
//}
fn collect_info(shopping_list: Vec<EnvTidbit>) -> HashMap<String, Option<String>>{
let mut info = HashMap::new();
for from in shopping_list {
let (k,v) = match from {
EnvTidbit::Env(key) => (key, fetch_env(key, false, true)),
EnvTidbit::EnvReq(key) => (key, fetch_env(key, true, true)),
EnvTidbit::FileContentsReq{key, relative_to_build_rs} => {
let io_error_expect = format!("Failed to read file {:?}. This file is required to be embedded in output binaries.", relative_to_build_rs);
let mut file = File::open(relative_to_build_rs).expect(&io_error_expect);
let mut contents = String::new();
file.read_to_string( &mut contents).expect(&io_error_expect);
(key, Some(contents))
},
EnvTidbit::Cmd{key, cmd} => (key, command(key, cmd, false, false)),
EnvTidbit::CmdReq{key, cmd} => (key, command(key, cmd, true, false)),
EnvTidbit::CmdOrEnvReq{key, cmd} => (key, command(key, cmd, true, true)),
|
info
}
fn what_to_collect() -> Vec<EnvTidbit>{
let mut c = Vec::new();
c.push(EnvTidbit::CmdOrEnvReq {key: "GIT_COMMIT", cmd: "git rev-parse HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_COMMIT_SHORT", cmd: "git rev-parse --short HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALWAYS", cmd: "git describe --always --tags"});
c.push(EnvTidbit::CmdOrEnvReq{key: "GIT_DESCRIBE_ALWAYS_LONG", cmd: "git describe --always --tags --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALL", cmd: "git describe --always --all --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_TAG", cmd: "git describe --exact-match --tags"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_BRANCH", cmd: "git symbolic-ref --short HEAD"});
static ENV_VARS: [&'static str;21] = ["ESTIMATED_ARTIFACT_URL","ESTIMATED_DOCS_URL","CI_SEQUENTIAL_BUILD_NUMBER","CI_BUILD_URL","CI_JOB_URL","CI_JOB_TITLE","CI_STRING",
"CI_PULL_REQUEST_INFO", "CI_TAG", "CI_REPO", "CI_RELATED_BRANCH", "CI", "TARGET", "OUT_DIR", "HOST", "OPT_LEVEL", "DEBUG", "PROFILE", "RUSTC", "RUSTFLAGS","TARGET_CPU"
];
for name in ENV_VARS.iter(){
c.push(EnvTidbit::Env(name));
}
c.push(EnvTidbit::EnvReq("CARGO_MANIFEST_DIR"));
c.push(EnvTidbit::Cmd{key: "GIT_STATUS", cmd: "git checkout../c_components/tests/visuals/weights.txt && git status"});
c.push(EnvTidbit::Cmd{key: "GLIBC_VERSION", cmd: "ldd --version"});
c.push(EnvTidbit::Cmd{key: "UNAME", cmd: "uname -av"});
c.push(EnvTidbit::Cmd{key: "WIN_SYSTEMINFO", cmd: "systeminfo.exe"});
//TODO: ver?
c.push(EnvTidbit::Cmd{key: "DEFAULT_GCC_VERSION", cmd: "gcc -v"});
c.push(EnvTidbit::Cmd{key: "DEFAULT_CLANG_VERSION", cmd: "clang --version"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_RUSTC_VERSION", cmd: "rustc -V"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_CARGO_VERSION", cmd: "cargo -V"});
c
}
fn write_file(name: &str, file_contents: String) -> std::result::Result<(), Error> {
let path = env::var_os("OUT_DIR").ok_or(Error::MissingEnvVar)?;
let path : &Path = path.as_ref();
create_dir_all(path)?;
let path = path.join(name);
let mut file = BufWriter::new(File::create(&path)?);
write!(file, "{}", file_contents)?;
Ok(())
}
fn main() {
let todo = what_to_collect();
let utcnow_val = Utc::now();
let mut results = collect_info(todo);
results.insert("GENERATED_DATETIME_UTC".to_owned(), Some(utcnow_val.to_rfc3339()));
results.insert("GENERATED_DATE_UTC".to_owned(), Some(utcnow_val.format("%Y-%m-%d").to_string()));
let mut contents = String::new();
contents += "use std::collections::HashMap;\n";
// contents += "#[macro_use]\nextern crate lazy_static;\n";
contents += "fn get_build_env_info() -> HashMap<&'static str, Option<&'static str>> {\n";
contents += " let mut i = HashMap::new();\n";
for (k, v) in &results{
let line = format!(" i.insert({:?}, {:?});\n", k,v);
contents += &line;
}
contents += " i\n}\nlazy_static! {\n pub static ref BUILD_ENV_INFO: HashMap<&'static str, Option<&'static str>> = ";
contents += "get_build_env_info();\n}\n";
//These vars are required for all builds
for name in ["GIT_COMMIT", "GIT_DESCRIBE_ALWAYS", "TARGET", "GENERATED_DATETIME_UTC", "GENERATED_DATE_UTC"].iter(){
let value = results.get::<str>(name).unwrap().to_owned().unwrap();
let line = format!("pub static {}: &'static str = {:?};\n", name,&value);
contents += &line;
}
let ci_value = results.get("CI").unwrap().to_owned().unwrap_or("false".to_owned()).to_lowercase();
let line = format!("pub static BUILT_ON_CI: bool = {};\n", ci_value);
contents += &line;
// let line = format!("pub static GENERATED_DATETIME_UTC: &'static str = {:?};\n", utcnow_val.to_rfc3339());
// contents += &line;
// let line = format!("pub static GENERATED_DATE_UTC: &'static str = {:?};\n", utcnow_val.format("%Y-%m-%d").to_string());
// contents += &line;
let _ = write_file("build_env_info.rs", contents ).expect("Saving git version");
}
|
EnvTidbit::CmdOrEnv{key, cmd} => (key, command(key, cmd, false, true)),
EnvTidbit::EnvOrCmdInconsistent{key, cmd} => (key, env_or_cmd(key, cmd)),
};
info.insert(k.to_owned(),v);
}
|
random_line_split
|
build.rs
|
#[macro_use]
extern crate quick_error;
extern crate chrono;
use std::env;
use chrono::*;
use std::convert::AsRef;
use std::fs::{File, create_dir_all};
use std::io::{Write, Read, BufWriter};
use std::path::{Path};
use std::process::{Command, Output};
use std::collections::HashMap;
quick_error! {
#[derive(Debug)]
pub enum Error {
Io(err: std::io::Error) {
from()
}
MissingEnvVar {
}
CommandFailed(err: Output){
from()
}
CommandEmptyOutput(err: String){
from()
}
}
}
pub enum EnvTidbit{
Env(&'static str),
EnvReq(&'static str),
Cmd{key: &'static str, cmd: &'static str},
CmdReq{key: &'static str, cmd: &'static str},
CmdOrEnvReq{key: &'static str, cmd: &'static str},
CmdOrEnv{key: &'static str, cmd: &'static str},
EnvOrCmdInconsistent{key: &'static str, cmd: &'static str},
FileContentsReq{key: &'static str, relative_to_build_rs: &'static str}
}
fn run(cmd: &str) -> std::result::Result<String,Error> {
let mut args: Vec<&str> = cmd.split(" ").collect::<Vec<&str>>();
if args.len() < 1 {
panic!("");
}
let exe = args.remove(0);
let output: Output = Command::new(exe)
.args(&args)
.output()?;
if !output.status.success() {
return Err(Error::CommandFailed(output));
}
let utf8_msg = format!("Command produced invalid UTF-8 output: {}", cmd);
let str_out: &str = std::str::from_utf8(&output.stdout).expect(&utf8_msg);
if str_out.split_whitespace().count() > 0 {
Ok(str_out.trim().to_owned())
} else {
Err(Error::from(str_out.to_owned()))
}
}
fn fetch_env(key: &str, result_required: bool, empty_is_missing: bool) -> Option<String>{
if result_required {
match env::var(key) {
Ok(ref v) if v.len() == 0 && empty_is_missing => {
panic!("Required env var {} is present - but empty - in the build environment", key);
},
Ok(v) => Some(v),
Err(e) => { panic!("Required env var {} missing in the build environment: {:?}", key, e); }
}
}else{
env::var(key).ok().and_then(|v| if v.len() == 0 && empty_is_missing { None } else { Some(v) })
}
}
fn command(key: &str, cmd: &str, result_required: bool, fallback_to_env: bool) -> Option<String>{
//Panic only if non-UTF-8 output is sent
let output = run(cmd);
//Don't panic when fetching env var
let env_val = match fallback_to_env { true => fetch_env(key, false, true), false => None};
//Ensure consistency if both are present
if let Ok(ref out_str) = output {
if let Some(ref env_str) = env_val {
if out_str != env_str {
if out_str.trim() != env_str.trim() {
panic!("Inconsistent values for {} and {}.\nCommand output: {}\nEnv var: {}", key, cmd, out_str, env_str);
}
}
}
}
if result_required && output.is_err() && env_val.is_none() {
if fallback_to_env {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}, and ENV var {} was missing or empty.",
key, cmd, output, key);
} else {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}. ENV var not consulted.",
key, cmd, output);
}
}else {
output.ok().or(env_val)
}
}
fn env_or_cmd(key: &str, cmd: &str) -> Option<String>{
fetch_env(key, false, true).or(run(cmd).ok())
}
//
//fn get_repo_root() -> PathBuf{
// let build_rs_path = file!();
// Path::new(&build_rs_path).parent().expect("Rust must be stripping parent directory info from file! macro. This breaks path stuff in build.rs.").to_owned()
//}
fn collect_info(shopping_list: Vec<EnvTidbit>) -> HashMap<String, Option<String>>{
let mut info = HashMap::new();
for from in shopping_list {
let (k,v) = match from {
EnvTidbit::Env(key) => (key, fetch_env(key, false, true)),
EnvTidbit::EnvReq(key) => (key, fetch_env(key, true, true)),
EnvTidbit::FileContentsReq{key, relative_to_build_rs} => {
let io_error_expect = format!("Failed to read file {:?}. This file is required to be embedded in output binaries.", relative_to_build_rs);
let mut file = File::open(relative_to_build_rs).expect(&io_error_expect);
let mut contents = String::new();
file.read_to_string( &mut contents).expect(&io_error_expect);
(key, Some(contents))
},
EnvTidbit::Cmd{key, cmd} => (key, command(key, cmd, false, false)),
EnvTidbit::CmdReq{key, cmd} => (key, command(key, cmd, true, false)),
EnvTidbit::CmdOrEnvReq{key, cmd} => (key, command(key, cmd, true, true)),
EnvTidbit::CmdOrEnv{key, cmd} => (key, command(key, cmd, false, true)),
EnvTidbit::EnvOrCmdInconsistent{key, cmd} => (key, env_or_cmd(key, cmd)),
};
info.insert(k.to_owned(),v);
}
info
}
fn what_to_collect() -> Vec<EnvTidbit>{
let mut c = Vec::new();
c.push(EnvTidbit::CmdOrEnvReq {key: "GIT_COMMIT", cmd: "git rev-parse HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_COMMIT_SHORT", cmd: "git rev-parse --short HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALWAYS", cmd: "git describe --always --tags"});
c.push(EnvTidbit::CmdOrEnvReq{key: "GIT_DESCRIBE_ALWAYS_LONG", cmd: "git describe --always --tags --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALL", cmd: "git describe --always --all --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_TAG", cmd: "git describe --exact-match --tags"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_BRANCH", cmd: "git symbolic-ref --short HEAD"});
static ENV_VARS: [&'static str;21] = ["ESTIMATED_ARTIFACT_URL","ESTIMATED_DOCS_URL","CI_SEQUENTIAL_BUILD_NUMBER","CI_BUILD_URL","CI_JOB_URL","CI_JOB_TITLE","CI_STRING",
"CI_PULL_REQUEST_INFO", "CI_TAG", "CI_REPO", "CI_RELATED_BRANCH", "CI", "TARGET", "OUT_DIR", "HOST", "OPT_LEVEL", "DEBUG", "PROFILE", "RUSTC", "RUSTFLAGS","TARGET_CPU"
];
for name in ENV_VARS.iter(){
c.push(EnvTidbit::Env(name));
}
c.push(EnvTidbit::EnvReq("CARGO_MANIFEST_DIR"));
c.push(EnvTidbit::Cmd{key: "GIT_STATUS", cmd: "git checkout../c_components/tests/visuals/weights.txt && git status"});
c.push(EnvTidbit::Cmd{key: "GLIBC_VERSION", cmd: "ldd --version"});
c.push(EnvTidbit::Cmd{key: "UNAME", cmd: "uname -av"});
c.push(EnvTidbit::Cmd{key: "WIN_SYSTEMINFO", cmd: "systeminfo.exe"});
//TODO: ver?
c.push(EnvTidbit::Cmd{key: "DEFAULT_GCC_VERSION", cmd: "gcc -v"});
c.push(EnvTidbit::Cmd{key: "DEFAULT_CLANG_VERSION", cmd: "clang --version"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_RUSTC_VERSION", cmd: "rustc -V"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_CARGO_VERSION", cmd: "cargo -V"});
c
}
fn write_file(name: &str, file_contents: String) -> std::result::Result<(), Error> {
let path = env::var_os("OUT_DIR").ok_or(Error::MissingEnvVar)?;
let path : &Path = path.as_ref();
create_dir_all(path)?;
let path = path.join(name);
let mut file = BufWriter::new(File::create(&path)?);
write!(file, "{}", file_contents)?;
Ok(())
}
fn
|
() {
let todo = what_to_collect();
let utcnow_val = Utc::now();
let mut results = collect_info(todo);
results.insert("GENERATED_DATETIME_UTC".to_owned(), Some(utcnow_val.to_rfc3339()));
results.insert("GENERATED_DATE_UTC".to_owned(), Some(utcnow_val.format("%Y-%m-%d").to_string()));
let mut contents = String::new();
contents += "use std::collections::HashMap;\n";
// contents += "#[macro_use]\nextern crate lazy_static;\n";
contents += "fn get_build_env_info() -> HashMap<&'static str, Option<&'static str>> {\n";
contents += " let mut i = HashMap::new();\n";
for (k, v) in &results{
let line = format!(" i.insert({:?}, {:?});\n", k,v);
contents += &line;
}
contents += " i\n}\nlazy_static! {\n pub static ref BUILD_ENV_INFO: HashMap<&'static str, Option<&'static str>> = ";
contents += "get_build_env_info();\n}\n";
//These vars are required for all builds
for name in ["GIT_COMMIT", "GIT_DESCRIBE_ALWAYS", "TARGET", "GENERATED_DATETIME_UTC", "GENERATED_DATE_UTC"].iter(){
let value = results.get::<str>(name).unwrap().to_owned().unwrap();
let line = format!("pub static {}: &'static str = {:?};\n", name,&value);
contents += &line;
}
let ci_value = results.get("CI").unwrap().to_owned().unwrap_or("false".to_owned()).to_lowercase();
let line = format!("pub static BUILT_ON_CI: bool = {};\n", ci_value);
contents += &line;
// let line = format!("pub static GENERATED_DATETIME_UTC: &'static str = {:?};\n", utcnow_val.to_rfc3339());
// contents += &line;
// let line = format!("pub static GENERATED_DATE_UTC: &'static str = {:?};\n", utcnow_val.format("%Y-%m-%d").to_string());
// contents += &line;
let _ = write_file("build_env_info.rs", contents ).expect("Saving git version");
}
|
main
|
identifier_name
|
build.rs
|
#[macro_use]
extern crate quick_error;
extern crate chrono;
use std::env;
use chrono::*;
use std::convert::AsRef;
use std::fs::{File, create_dir_all};
use std::io::{Write, Read, BufWriter};
use std::path::{Path};
use std::process::{Command, Output};
use std::collections::HashMap;
quick_error! {
#[derive(Debug)]
pub enum Error {
Io(err: std::io::Error) {
from()
}
MissingEnvVar {
}
CommandFailed(err: Output){
from()
}
CommandEmptyOutput(err: String){
from()
}
}
}
pub enum EnvTidbit{
Env(&'static str),
EnvReq(&'static str),
Cmd{key: &'static str, cmd: &'static str},
CmdReq{key: &'static str, cmd: &'static str},
CmdOrEnvReq{key: &'static str, cmd: &'static str},
CmdOrEnv{key: &'static str, cmd: &'static str},
EnvOrCmdInconsistent{key: &'static str, cmd: &'static str},
FileContentsReq{key: &'static str, relative_to_build_rs: &'static str}
}
fn run(cmd: &str) -> std::result::Result<String,Error> {
let mut args: Vec<&str> = cmd.split(" ").collect::<Vec<&str>>();
if args.len() < 1 {
panic!("");
}
let exe = args.remove(0);
let output: Output = Command::new(exe)
.args(&args)
.output()?;
if !output.status.success() {
return Err(Error::CommandFailed(output));
}
let utf8_msg = format!("Command produced invalid UTF-8 output: {}", cmd);
let str_out: &str = std::str::from_utf8(&output.stdout).expect(&utf8_msg);
if str_out.split_whitespace().count() > 0 {
Ok(str_out.trim().to_owned())
} else {
Err(Error::from(str_out.to_owned()))
}
}
fn fetch_env(key: &str, result_required: bool, empty_is_missing: bool) -> Option<String>{
if result_required {
match env::var(key) {
Ok(ref v) if v.len() == 0 && empty_is_missing => {
panic!("Required env var {} is present - but empty - in the build environment", key);
},
Ok(v) => Some(v),
Err(e) =>
|
}
}else{
env::var(key).ok().and_then(|v| if v.len() == 0 && empty_is_missing { None } else { Some(v) })
}
}
fn command(key: &str, cmd: &str, result_required: bool, fallback_to_env: bool) -> Option<String>{
//Panic only if non-UTF-8 output is sent
let output = run(cmd);
//Don't panic when fetching env var
let env_val = match fallback_to_env { true => fetch_env(key, false, true), false => None};
//Ensure consistency if both are present
if let Ok(ref out_str) = output {
if let Some(ref env_str) = env_val {
if out_str != env_str {
if out_str.trim() != env_str.trim() {
panic!("Inconsistent values for {} and {}.\nCommand output: {}\nEnv var: {}", key, cmd, out_str, env_str);
}
}
}
}
if result_required && output.is_err() && env_val.is_none() {
if fallback_to_env {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}, and ENV var {} was missing or empty.",
key, cmd, output, key);
} else {
panic!("Failed to acquire {} (required for build). \nCommand {} resulted in {:?}. ENV var not consulted.",
key, cmd, output);
}
}else {
output.ok().or(env_val)
}
}
fn env_or_cmd(key: &str, cmd: &str) -> Option<String>{
fetch_env(key, false, true).or(run(cmd).ok())
}
//
//fn get_repo_root() -> PathBuf{
// let build_rs_path = file!();
// Path::new(&build_rs_path).parent().expect("Rust must be stripping parent directory info from file! macro. This breaks path stuff in build.rs.").to_owned()
//}
fn collect_info(shopping_list: Vec<EnvTidbit>) -> HashMap<String, Option<String>>{
let mut info = HashMap::new();
for from in shopping_list {
let (k,v) = match from {
EnvTidbit::Env(key) => (key, fetch_env(key, false, true)),
EnvTidbit::EnvReq(key) => (key, fetch_env(key, true, true)),
EnvTidbit::FileContentsReq{key, relative_to_build_rs} => {
let io_error_expect = format!("Failed to read file {:?}. This file is required to be embedded in output binaries.", relative_to_build_rs);
let mut file = File::open(relative_to_build_rs).expect(&io_error_expect);
let mut contents = String::new();
file.read_to_string( &mut contents).expect(&io_error_expect);
(key, Some(contents))
},
EnvTidbit::Cmd{key, cmd} => (key, command(key, cmd, false, false)),
EnvTidbit::CmdReq{key, cmd} => (key, command(key, cmd, true, false)),
EnvTidbit::CmdOrEnvReq{key, cmd} => (key, command(key, cmd, true, true)),
EnvTidbit::CmdOrEnv{key, cmd} => (key, command(key, cmd, false, true)),
EnvTidbit::EnvOrCmdInconsistent{key, cmd} => (key, env_or_cmd(key, cmd)),
};
info.insert(k.to_owned(),v);
}
info
}
fn what_to_collect() -> Vec<EnvTidbit>{
let mut c = Vec::new();
c.push(EnvTidbit::CmdOrEnvReq {key: "GIT_COMMIT", cmd: "git rev-parse HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_COMMIT_SHORT", cmd: "git rev-parse --short HEAD"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALWAYS", cmd: "git describe --always --tags"});
c.push(EnvTidbit::CmdOrEnvReq{key: "GIT_DESCRIBE_ALWAYS_LONG", cmd: "git describe --always --tags --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_DESCRIBE_ALL", cmd: "git describe --always --all --long"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_TAG", cmd: "git describe --exact-match --tags"});
c.push(EnvTidbit::CmdOrEnv{key: "GIT_OPTIONAL_BRANCH", cmd: "git symbolic-ref --short HEAD"});
static ENV_VARS: [&'static str;21] = ["ESTIMATED_ARTIFACT_URL","ESTIMATED_DOCS_URL","CI_SEQUENTIAL_BUILD_NUMBER","CI_BUILD_URL","CI_JOB_URL","CI_JOB_TITLE","CI_STRING",
"CI_PULL_REQUEST_INFO", "CI_TAG", "CI_REPO", "CI_RELATED_BRANCH", "CI", "TARGET", "OUT_DIR", "HOST", "OPT_LEVEL", "DEBUG", "PROFILE", "RUSTC", "RUSTFLAGS","TARGET_CPU"
];
for name in ENV_VARS.iter(){
c.push(EnvTidbit::Env(name));
}
c.push(EnvTidbit::EnvReq("CARGO_MANIFEST_DIR"));
c.push(EnvTidbit::Cmd{key: "GIT_STATUS", cmd: "git checkout../c_components/tests/visuals/weights.txt && git status"});
c.push(EnvTidbit::Cmd{key: "GLIBC_VERSION", cmd: "ldd --version"});
c.push(EnvTidbit::Cmd{key: "UNAME", cmd: "uname -av"});
c.push(EnvTidbit::Cmd{key: "WIN_SYSTEMINFO", cmd: "systeminfo.exe"});
//TODO: ver?
c.push(EnvTidbit::Cmd{key: "DEFAULT_GCC_VERSION", cmd: "gcc -v"});
c.push(EnvTidbit::Cmd{key: "DEFAULT_CLANG_VERSION", cmd: "clang --version"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_RUSTC_VERSION", cmd: "rustc -V"});
c.push(EnvTidbit::CmdReq{key: "DEFAULT_CARGO_VERSION", cmd: "cargo -V"});
c
}
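// Writes `file_contents` to $OUT_DIR/<name>, creating the directory first if needed.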
fn write_file(name: &str, file_contents: String) -> std::result::Result<(), Error> {
let path = env::var_os("OUT_DIR").ok_or(Error::MissingEnvVar)?;
let path : &Path = path.as_ref();
create_dir_all(path)?;
let path = path.join(name);
let mut file = BufWriter::new(File::create(&path)?);
write!(file, "{}", file_contents)?;
Ok(())
}
fn main() {
let todo = what_to_collect();
    let utcnow_val = Utc::now();
let mut results = collect_info(todo);
results.insert("GENERATED_DATETIME_UTC".to_owned(), Some(utcnow_val.to_rfc3339()));
results.insert("GENERATED_DATE_UTC".to_owned(), Some(utcnow_val.format("%Y-%m-%d").to_string()));
let mut contents = String::new();
contents += "use std::collections::HashMap;\n";
// contents += "#[macro_use]\nextern crate lazy_static;\n";
contents += "fn get_build_env_info() -> HashMap<&'static str, Option<&'static str>> {\n";
contents += " let mut i = HashMap::new();\n";
    for (k, v) in &results {
let line = format!(" i.insert({:?}, {:?});\n", k,v);
contents += &line;
}
contents += " i\n}\nlazy_static! {\n pub static ref BUILD_ENV_INFO: HashMap<&'static str, Option<&'static str>> = ";
contents += "get_build_env_info();\n}\n";
//These vars are required for all builds
for name in ["GIT_COMMIT", "GIT_DESCRIBE_ALWAYS", "TARGET", "GENERATED_DATETIME_UTC", "GENERATED_DATE_UTC"].iter(){
let value = results.get::<str>(name).unwrap().to_owned().unwrap();
let line = format!("pub static {}: &'static str = {:?};\n", name,&value);
contents += &line;
}
let ci_value = results.get("CI").unwrap().to_owned().unwrap_or("false".to_owned()).to_lowercase();
let line = format!("pub static BUILT_ON_CI: bool = {};\n", ci_value);
contents += &line;
// let line = format!("pub static GENERATED_DATETIME_UTC: &'static str = {:?};\n", utcnow_val.to_rfc3339());
// contents += &line;
// let line = format!("pub static GENERATED_DATE_UTC: &'static str = {:?};\n", utcnow_val.format("%Y-%m-%d").to_string());
// contents += &line;
    write_file("build_env_info.rs", contents).expect("Failed to write build_env_info.rs");
}
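// Consumers of this build script would typically pull the generated file in with an
// `include!` (a sketch, assuming the crate also provides the `lazy_static` macro that
// the generated code expects; the path mirrors the `write_file` call above):
//
//     include!(concat!(env!("OUT_DIR"), "/build_env_info.rs"));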
|
{ panic!("Required env var {} missing in the build environment: {:?}", key, e); }
|
conditional_block
|
020.rs
|
extern crate num;
extern crate test;
extern crate time;
use num::bigint::{BigUint,ToBigUint};
use std::iter::{AdditiveIterator,MultiplicativeIterator,range_inclusive};
use std::num::One;
use std::os::args;
use test::black_box;
use time::precise_time_ns;
fn factorial(n: uint) -> BigUint {
range_inclusive(One::one(), n.to_biguint().unwrap()).product()
}
#[inline]
fn f() -> uint
|
fn main() {
match args().as_slice() {
[_] => {
println!("{}", f());
},
[_, ref iters] => {
let iters: u64 = from_str(iters.as_slice()).unwrap();
let start = precise_time_ns();
for _ in range(0, iters) {
black_box(f());
}
let end = precise_time_ns();
println!("{}", end - start);
},
_ => unreachable!(),
}
}
|
{
factorial(100).to_str().as_slice()
.chars()
.filter_map(|c| c.to_digit(10))
.sum()
}
|
identifier_body
|
020.rs
|
extern crate num;
extern crate test;
extern crate time;
use num::bigint::{BigUint,ToBigUint};
use std::iter::{AdditiveIterator,MultiplicativeIterator,range_inclusive};
use std::num::One;
use std::os::args;
use test::black_box;
use time::precise_time_ns;
fn factorial(n: uint) -> BigUint {
range_inclusive(One::one(), n.to_biguint().unwrap()).product()
}
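// Sum of the decimal digits of 100! (Project Euler problem 20).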
#[inline]
fn f() -> uint {
factorial(100).to_str().as_slice()
.chars()
.filter_map(|c| c.to_digit(10))
.sum()
}
fn main() {
match args().as_slice() {
[_] => {
println!("{}", f());
|
let start = precise_time_ns();
for _ in range(0, iters) {
black_box(f());
}
let end = precise_time_ns();
println!("{}", end - start);
},
_ => unreachable!(),
}
}
|
},
[_, ref iters] => {
let iters: u64 = from_str(iters.as_slice()).unwrap();
|
random_line_split
|
020.rs
|
extern crate num;
extern crate test;
extern crate time;
use num::bigint::{BigUint,ToBigUint};
use std::iter::{AdditiveIterator,MultiplicativeIterator,range_inclusive};
use std::num::One;
use std::os::args;
use test::black_box;
use time::precise_time_ns;
fn
|
(n: uint) -> BigUint {
range_inclusive(One::one(), n.to_biguint().unwrap()).product()
}
#[inline]
fn f() -> uint {
factorial(100).to_str().as_slice()
.chars()
.filter_map(|c| c.to_digit(10))
.sum()
}
fn main() {
match args().as_slice() {
[_] => {
println!("{}", f());
},
[_, ref iters] => {
let iters: u64 = from_str(iters.as_slice()).unwrap();
let start = precise_time_ns();
for _ in range(0, iters) {
black_box(f());
}
let end = precise_time_ns();
println!("{}", end - start);
},
_ => unreachable!(),
}
}
|
factorial
|
identifier_name
|
activation_manager.rs
|
use crate::joint::{JointConstraint, JointConstraintSet};
use crate::object::{Body, BodyHandle, BodySet, ColliderSet};
use crate::utils::union_find;
use crate::utils::union_find::UnionFindSet;
use crate::world::GeometricalWorld;
use na::{self, RealField};
/// Structure that monitors island-based activation/deactivation of bodies.
///
/// It is responsible for making objects sleep or wake up.
#[derive(Clone)]
pub struct ActivationManager<N: RealField, Handle: BodyHandle> {
mix_factor: N,
ufind: Vec<UnionFindSet>,
can_deactivate: Vec<bool>,
to_activate: Vec<Handle>,
id_to_body: Vec<Handle>,
}
impl<N: RealField, Handle: BodyHandle> ActivationManager<N, Handle> {
    /// Creates a new `ActivationManager`.
    ///
    /// # Arguments:
    /// * `threshold` - the minimum energy required to keep an object awake.
/// * `mix_factor` - the ratio of energy to keep between two frames.
pub fn new(mix_factor: N) -> ActivationManager<N, Handle> {
assert!(
mix_factor >= na::zero(),
"The energy mixing factor must be between 0.0 and 1.0."
);
ActivationManager {
mix_factor,
ufind: Vec::new(),
can_deactivate: Vec::new(),
to_activate: Vec::new(),
id_to_body: Vec::new(),
}
}
    /// Notify the `ActivationManager` that it has to activate an object at the next update.
    // FIXME: this is not a very good name
    pub fn deferred_activate(&mut self, handle: Handle) {
        self.to_activate.push(handle);
    }
    fn update_energy(&self, body: &mut (impl Body<N> + ?Sized)) {
        // FIXME: avoid the Copy when NLL lands?
        let status = *body.activation_status();
        if let Some(threshold) = status.deactivation_threshold() {
            // FIXME: take the time into account (to make a true RWA)
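            // Recursive weighted average: blend the previous activation energy with the
            // squared norm of the generalized velocity, capped at four times the threshold.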
let new_energy = (N::one() - self.mix_factor) * status.energy()
+ self.mix_factor * (body.generalized_velocity().norm_squared());
body.activate_with_energy(new_energy.min(threshold * na::convert(4.0f64)));
}
}
/// Update the activation manager, activating and deactivating objects when needed.
pub fn update<Colliders, Constraints>(
&mut self,
bodies: &mut dyn BodySet<N, Handle = Handle>,
colliders: &Colliders,
gworld: &GeometricalWorld<N, Handle, Colliders::Handle>,
constraints: &Constraints,
active_bodies: &mut Vec<Handle>,
) where
Colliders: ColliderSet<N, Handle>,
Constraints: JointConstraintSet<N, Handle>,
{
/*
*
* Update bodies energy
*
*/
self.id_to_body.clear();
bodies.foreach_mut(&mut |handle, body: &mut dyn Body<N>| {
            if body.status_dependent_ndofs() != 0 {
if body.is_active() {
self.update_energy(body);
}
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
if body.is_kinematic() {
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
});
/*
*
* Activate bodies that need it.
*
*/
for handle in self.to_activate.iter() {
let body = try_continue!(bodies.get_mut(*handle));
if body.activation_status().deactivation_threshold().is_some() {
body.activate()
}
}
self.to_activate.clear();
/*
*
* Build islands.
*
*/
// Resize buffers.
self.ufind
.resize(self.id_to_body.len(), UnionFindSet::new(0));
self.can_deactivate.resize(self.id_to_body.len(), true);
// Init the union find.
// FIXME: are there more efficient ways of doing those?
for (i, u) in self.ufind.iter_mut().enumerate() {
u.reinit(i)
}
for d in self.can_deactivate.iter_mut() {
*d = true
}
// Run the union-find.
// FIXME: use the union-find from petgraph?
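        // Two bodies end up in the same island when they are linked by a contact manifold
        // or by an unbroken joint constraint; `make_union` merges their union-find sets.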
#[inline(always)]
fn make_union<N: RealField, Handle: BodyHandle>(
bodies: &dyn BodySet<N, Handle = Handle>,
b1: Handle,
b2: Handle,
ufs: &mut [UnionFindSet],
)
|
for (_, c1, _, c2, _, manifold) in gworld.contact_pairs(colliders, false) {
if manifold.len() > 0 {
make_union(bodies, c1.body(), c2.body(), &mut self.ufind)
}
}
constraints.foreach(|_, c| {
            if !c.is_broken() {
let (b1, b2) = c.anchors();
make_union(bodies, b1.0, b2.0, &mut self.ufind);
}
});
/*
* Body activation/deactivation.
*/
// Find deactivable islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get(handle));
// FIXME: avoid the Copy when NLL lands?
let status = *body.activation_status();
self.can_deactivate[root] = match status.deactivation_threshold() {
Some(threshold) => self.can_deactivate[root] && status.energy() < threshold,
None => false,
};
}
// Activate/deactivate islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get_mut(handle));
if self.can_deactivate[root] {
                // Everybody in this set can be deactivated.
                if body.is_active() {
                    body.deactivate();
                }
            } else if !body.is_kinematic() {
                // Everybody in this set must be reactivated.
                active_bodies.push(handle);
                // FIXME: avoid the Copy when NLL lands?
                let status = *body.activation_status();
                if !status.is_active() && status.deactivation_threshold().is_some() {
body.activate()
}
}
}
}
}
|
{
let b1 = try_ret!(bodies.get(b1));
let b2 = try_ret!(bodies.get(b2));
if (b1.status_dependent_ndofs() != 0 || b1.is_kinematic())
&& (b2.status_dependent_ndofs() != 0 || b2.is_kinematic())
{
union_find::union(b1.companion_id(), b2.companion_id(), ufs)
}
}
|
identifier_body
|
activation_manager.rs
|
use crate::joint::{JointConstraint, JointConstraintSet};
use crate::object::{Body, BodyHandle, BodySet, ColliderSet};
use crate::utils::union_find;
use crate::utils::union_find::UnionFindSet;
use crate::world::GeometricalWorld;
use na::{self, RealField};
/// Structure that monitors island-based activation/deactivation of bodies.
///
/// It is responsible for making objects sleep or wake up.
#[derive(Clone)]
pub struct
|
<N: RealField, Handle: BodyHandle> {
mix_factor: N,
ufind: Vec<UnionFindSet>,
can_deactivate: Vec<bool>,
to_activate: Vec<Handle>,
id_to_body: Vec<Handle>,
}
impl<N: RealField, Handle: BodyHandle> ActivationManager<N, Handle> {
    /// Creates a new `ActivationManager`.
    ///
    /// # Arguments:
    /// * `threshold` - the minimum energy required to keep an object awake.
/// * `mix_factor` - the ratio of energy to keep between two frames.
pub fn new(mix_factor: N) -> ActivationManager<N, Handle> {
assert!(
mix_factor >= na::zero(),
"The energy mixing factor must be between 0.0 and 1.0."
);
ActivationManager {
mix_factor,
ufind: Vec::new(),
can_deactivate: Vec::new(),
to_activate: Vec::new(),
id_to_body: Vec::new(),
}
}
    /// Notify the `ActivationManager` that it has to activate an object at the next update.
    // FIXME: this is not a very good name
    pub fn deferred_activate(&mut self, handle: Handle) {
        self.to_activate.push(handle);
    }
    fn update_energy(&self, body: &mut (impl Body<N> + ?Sized)) {
        // FIXME: avoid the Copy when NLL lands?
        let status = *body.activation_status();
        if let Some(threshold) = status.deactivation_threshold() {
            // FIXME: take the time into account (to make a true RWA)
let new_energy = (N::one() - self.mix_factor) * status.energy()
+ self.mix_factor * (body.generalized_velocity().norm_squared());
body.activate_with_energy(new_energy.min(threshold * na::convert(4.0f64)));
}
}
/// Update the activation manager, activating and deactivating objects when needed.
pub fn update<Colliders, Constraints>(
&mut self,
bodies: &mut dyn BodySet<N, Handle = Handle>,
colliders: &Colliders,
gworld: &GeometricalWorld<N, Handle, Colliders::Handle>,
constraints: &Constraints,
active_bodies: &mut Vec<Handle>,
) where
Colliders: ColliderSet<N, Handle>,
Constraints: JointConstraintSet<N, Handle>,
{
/*
*
* Update bodies energy
*
*/
self.id_to_body.clear();
bodies.foreach_mut(&mut |handle, body: &mut dyn Body<N>| {
            if body.status_dependent_ndofs() != 0 {
if body.is_active() {
self.update_energy(body);
}
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
if body.is_kinematic() {
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
});
/*
*
* Activate bodies that need it.
*
*/
for handle in self.to_activate.iter() {
let body = try_continue!(bodies.get_mut(*handle));
if body.activation_status().deactivation_threshold().is_some() {
body.activate()
}
}
self.to_activate.clear();
/*
*
* Build islands.
*
*/
// Resize buffers.
self.ufind
.resize(self.id_to_body.len(), UnionFindSet::new(0));
self.can_deactivate.resize(self.id_to_body.len(), true);
// Init the union find.
// FIXME: are there more efficient ways of doing those?
for (i, u) in self.ufind.iter_mut().enumerate() {
u.reinit(i)
}
for d in self.can_deactivate.iter_mut() {
*d = true
}
// Run the union-find.
// FIXME: use the union-find from petgraph?
#[inline(always)]
fn make_union<N: RealField, Handle: BodyHandle>(
bodies: &dyn BodySet<N, Handle = Handle>,
b1: Handle,
b2: Handle,
ufs: &mut [UnionFindSet],
) {
let b1 = try_ret!(bodies.get(b1));
let b2 = try_ret!(bodies.get(b2));
            if (b1.status_dependent_ndofs() != 0 || b1.is_kinematic())
                && (b2.status_dependent_ndofs() != 0 || b2.is_kinematic())
{
union_find::union(b1.companion_id(), b2.companion_id(), ufs)
}
}
for (_, c1, _, c2, _, manifold) in gworld.contact_pairs(colliders, false) {
if manifold.len() > 0 {
make_union(bodies, c1.body(), c2.body(), &mut self.ufind)
}
}
constraints.foreach(|_, c| {
            if !c.is_broken() {
let (b1, b2) = c.anchors();
make_union(bodies, b1.0, b2.0, &mut self.ufind);
}
});
/*
* Body activation/deactivation.
*/
// Find deactivable islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get(handle));
// FIXME: avoid the Copy when NLL lands?
let status = *body.activation_status();
self.can_deactivate[root] = match status.deactivation_threshold() {
Some(threshold) => self.can_deactivate[root] && status.energy() < threshold,
None => false,
};
}
// Activate/deactivate islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get_mut(handle));
if self.can_deactivate[root] {
                // Everybody in this set can be deactivated.
                if body.is_active() {
                    body.deactivate();
                }
            } else if !body.is_kinematic() {
                // Everybody in this set must be reactivated.
                active_bodies.push(handle);
                // FIXME: avoid the Copy when NLL lands?
                let status = *body.activation_status();
                if !status.is_active() && status.deactivation_threshold().is_some() {
body.activate()
}
}
}
}
}
|
ActivationManager
|
identifier_name
|
activation_manager.rs
|
use crate::joint::{JointConstraint, JointConstraintSet};
use crate::object::{Body, BodyHandle, BodySet, ColliderSet};
use crate::utils::union_find;
use crate::utils::union_find::UnionFindSet;
use crate::world::GeometricalWorld;
use na::{self, RealField};
/// Structure that monitors island-based activation/deactivation of bodies.
///
/// It is responsible for making objects sleep or wake up.
#[derive(Clone)]
pub struct ActivationManager<N: RealField, Handle: BodyHandle> {
mix_factor: N,
ufind: Vec<UnionFindSet>,
can_deactivate: Vec<bool>,
to_activate: Vec<Handle>,
id_to_body: Vec<Handle>,
}
impl<N: RealField, Handle: BodyHandle> ActivationManager<N, Handle> {
    /// Creates a new `ActivationManager`.
    ///
    /// # Arguments:
    /// * `threshold` - the minimum energy required to keep an object awake.
/// * `mix_factor` - the ratio of energy to keep between two frames.
pub fn new(mix_factor: N) -> ActivationManager<N, Handle> {
assert!(
mix_factor >= na::zero(),
"The energy mixing factor must be between 0.0 and 1.0."
);
ActivationManager {
mix_factor,
ufind: Vec::new(),
can_deactivate: Vec::new(),
to_activate: Vec::new(),
id_to_body: Vec::new(),
}
}
    /// Notify the `ActivationManager` that it has to activate an object at the next update.
    // FIXME: this is not a very good name
    pub fn deferred_activate(&mut self, handle: Handle) {
        self.to_activate.push(handle);
    }
    fn update_energy(&self, body: &mut (impl Body<N> + ?Sized)) {
        // FIXME: avoid the Copy when NLL lands?
        let status = *body.activation_status();
        if let Some(threshold) = status.deactivation_threshold() {
            // FIXME: take the time into account (to make a true RWA)
let new_energy = (N::one() - self.mix_factor) * status.energy()
+ self.mix_factor * (body.generalized_velocity().norm_squared());
body.activate_with_energy(new_energy.min(threshold * na::convert(4.0f64)));
}
}
/// Update the activation manager, activating and deactivating objects when needed.
pub fn update<Colliders, Constraints>(
&mut self,
bodies: &mut dyn BodySet<N, Handle = Handle>,
colliders: &Colliders,
gworld: &GeometricalWorld<N, Handle, Colliders::Handle>,
constraints: &Constraints,
active_bodies: &mut Vec<Handle>,
) where
Colliders: ColliderSet<N, Handle>,
Constraints: JointConstraintSet<N, Handle>,
{
/*
*
* Update bodies energy
*
*/
self.id_to_body.clear();
bodies.foreach_mut(&mut |handle, body: &mut dyn Body<N>| {
            if body.status_dependent_ndofs() != 0 {
if body.is_active() {
self.update_energy(body);
}
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
if body.is_kinematic() {
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
});
/*
*
* Activate bodies that need it.
*
*/
for handle in self.to_activate.iter() {
let body = try_continue!(bodies.get_mut(*handle));
if body.activation_status().deactivation_threshold().is_some() {
body.activate()
}
}
self.to_activate.clear();
/*
*
* Build islands.
*
*/
// Resize buffers.
self.ufind
.resize(self.id_to_body.len(), UnionFindSet::new(0));
self.can_deactivate.resize(self.id_to_body.len(), true);
// Init the union find.
// FIXME: are there more efficient ways of doing those?
for (i, u) in self.ufind.iter_mut().enumerate() {
u.reinit(i)
}
for d in self.can_deactivate.iter_mut() {
*d = true
}
// Run the union-find.
// FIXME: use the union-find from petgraph?
#[inline(always)]
fn make_union<N: RealField, Handle: BodyHandle>(
bodies: &dyn BodySet<N, Handle = Handle>,
b1: Handle,
b2: Handle,
ufs: &mut [UnionFindSet],
) {
let b1 = try_ret!(bodies.get(b1));
let b2 = try_ret!(bodies.get(b2));
            if (b1.status_dependent_ndofs() != 0 || b1.is_kinematic())
                && (b2.status_dependent_ndofs() != 0 || b2.is_kinematic())
{
union_find::union(b1.companion_id(), b2.companion_id(), ufs)
}
}
for (_, c1, _, c2, _, manifold) in gworld.contact_pairs(colliders, false) {
if manifold.len() > 0 {
make_union(bodies, c1.body(), c2.body(), &mut self.ufind)
}
}
constraints.foreach(|_, c| {
            if !c.is_broken() {
let (b1, b2) = c.anchors();
make_union(bodies, b1.0, b2.0, &mut self.ufind);
}
});
/*
* Body activation/deactivation.
*/
// Find deactivable islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get(handle));
|
self.can_deactivate[root] = match status.deactivation_threshold() {
Some(threshold) => self.can_deactivate[root] && status.energy() < threshold,
None => false,
};
}
// Activate/deactivate islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get_mut(handle));
if self.can_deactivate[root] {
                // Everybody in this set can be deactivated.
                if body.is_active() {
                    body.deactivate();
                }
            } else if !body.is_kinematic() {
                // Everybody in this set must be reactivated.
                active_bodies.push(handle);
                // FIXME: avoid the Copy when NLL lands?
                let status = *body.activation_status();
                if !status.is_active() && status.deactivation_threshold().is_some() {
body.activate()
}
}
}
}
}
|
// FIXME: avoid the Copy when NLL lands ?
let status = *body.activation_status();
|
random_line_split
|
activation_manager.rs
|
use crate::joint::{JointConstraint, JointConstraintSet};
use crate::object::{Body, BodyHandle, BodySet, ColliderSet};
use crate::utils::union_find;
use crate::utils::union_find::UnionFindSet;
use crate::world::GeometricalWorld;
use na::{self, RealField};
/// Structure that monitors island-based activation/deactivation of bodies.
///
/// It is responsible for making objects sleep or wake up.
#[derive(Clone)]
pub struct ActivationManager<N: RealField, Handle: BodyHandle> {
mix_factor: N,
ufind: Vec<UnionFindSet>,
can_deactivate: Vec<bool>,
to_activate: Vec<Handle>,
id_to_body: Vec<Handle>,
}
impl<N: RealField, Handle: BodyHandle> ActivationManager<N, Handle> {
    /// Creates a new `ActivationManager`.
    ///
    /// # Arguments:
    /// * `threshold` - the minimum energy required to keep an object awake.
/// * `mix_factor` - the ratio of energy to keep between two frames.
pub fn new(mix_factor: N) -> ActivationManager<N, Handle> {
assert!(
mix_factor >= na::zero(),
"The energy mixing factor must be between 0.0 and 1.0."
);
ActivationManager {
mix_factor,
ufind: Vec::new(),
can_deactivate: Vec::new(),
to_activate: Vec::new(),
id_to_body: Vec::new(),
}
}
    /// Notify the `ActivationManager` that it has to activate an object at the next update.
    // FIXME: this is not a very good name
    pub fn deferred_activate(&mut self, handle: Handle) {
        self.to_activate.push(handle);
    }
    fn update_energy(&self, body: &mut (impl Body<N> + ?Sized)) {
        // FIXME: avoid the Copy when NLL lands?
        let status = *body.activation_status();
        if let Some(threshold) = status.deactivation_threshold() {
            // FIXME: take the time into account (to make a true RWA)
let new_energy = (N::one() - self.mix_factor) * status.energy()
+ self.mix_factor * (body.generalized_velocity().norm_squared());
body.activate_with_energy(new_energy.min(threshold * na::convert(4.0f64)));
}
}
/// Update the activation manager, activating and deactivating objects when needed.
pub fn update<Colliders, Constraints>(
&mut self,
bodies: &mut dyn BodySet<N, Handle = Handle>,
colliders: &Colliders,
gworld: &GeometricalWorld<N, Handle, Colliders::Handle>,
constraints: &Constraints,
active_bodies: &mut Vec<Handle>,
) where
Colliders: ColliderSet<N, Handle>,
Constraints: JointConstraintSet<N, Handle>,
{
/*
*
* Update bodies energy
*
*/
self.id_to_body.clear();
bodies.foreach_mut(&mut |handle, body: &mut dyn Body<N>| {
            if body.status_dependent_ndofs() != 0 {
if body.is_active() {
self.update_energy(body);
}
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
if body.is_kinematic() {
body.set_companion_id(self.id_to_body.len());
self.id_to_body.push(handle);
}
});
/*
*
* Activate bodies that need it.
*
*/
for handle in self.to_activate.iter() {
let body = try_continue!(bodies.get_mut(*handle));
if body.activation_status().deactivation_threshold().is_some() {
body.activate()
}
}
self.to_activate.clear();
/*
*
* Build islands.
*
*/
// Resize buffers.
self.ufind
.resize(self.id_to_body.len(), UnionFindSet::new(0));
self.can_deactivate.resize(self.id_to_body.len(), true);
// Init the union find.
// FIXME: are there more efficient ways of doing those?
for (i, u) in self.ufind.iter_mut().enumerate() {
u.reinit(i)
}
for d in self.can_deactivate.iter_mut() {
*d = true
}
// Run the union-find.
// FIXME: use the union-find from petgraph?
#[inline(always)]
fn make_union<N: RealField, Handle: BodyHandle>(
bodies: &dyn BodySet<N, Handle = Handle>,
b1: Handle,
b2: Handle,
ufs: &mut [UnionFindSet],
) {
let b1 = try_ret!(bodies.get(b1));
let b2 = try_ret!(bodies.get(b2));
            if (b1.status_dependent_ndofs() != 0 || b1.is_kinematic())
                && (b2.status_dependent_ndofs() != 0 || b2.is_kinematic())
{
union_find::union(b1.companion_id(), b2.companion_id(), ufs)
}
}
for (_, c1, _, c2, _, manifold) in gworld.contact_pairs(colliders, false) {
if manifold.len() > 0 {
make_union(bodies, c1.body(), c2.body(), &mut self.ufind)
}
}
constraints.foreach(|_, c| {
            if !c.is_broken() {
let (b1, b2) = c.anchors();
make_union(bodies, b1.0, b2.0, &mut self.ufind);
}
});
/*
* Body activation/deactivation.
*/
// Find deactivable islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get(handle));
// FIXME: avoid the Copy when NLL lands?
let status = *body.activation_status();
self.can_deactivate[root] = match status.deactivation_threshold() {
Some(threshold) => self.can_deactivate[root] && status.energy() < threshold,
None => false,
};
}
// Activate/deactivate islands.
for i in 0usize..self.ufind.len() {
let root = union_find::find(i, &mut self.ufind[..]);
let handle = self.id_to_body[i];
let body = try_continue!(bodies.get_mut(handle));
if self.can_deactivate[root]
|
            else if !body.is_kinematic() {
                // Everybody in this set must be reactivated.
                active_bodies.push(handle);
                // FIXME: avoid the Copy when NLL lands?
                let status = *body.activation_status();
                if !status.is_active() && status.deactivation_threshold().is_some() {
body.activate()
}
}
}
}
}
|
{
                // Everybody in this set can be deactivated.
if body.is_active() {
body.deactivate();
}
}
|
conditional_block
|
issue-2804.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
extern crate serialize;
use std::collections::HashMap;
use serialize::json::{mod, Json};
use std::option;
enum object {
bool_value(bool),
int_value(i64),
}
fn
|
(table: json::Object, key: String, default: String) -> String
{
match table.find(&key.to_string()) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
default
}
option::Option::None => {
default
}
}
}
fn add_interface(_store: int, managed_ip: String, data: json::Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
let name = lookup(interface.clone(),
"ifDescr".to_string(),
"".to_string());
let label = format!("{}-{}", managed_ip, name);
(label, object::bool_value(false))
}
_ => {
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
}
}
fn add_interfaces(store: int, managed_ip: String, device: HashMap<String, json::Json>)
-> Vec<(String, object)> {
match device["interfaces".to_string()]
{
Json::Array(ref interfaces) =>
{
interfaces.iter().map(|interface| {
add_interface(store, managed_ip.clone(), (*interface).clone())
}).collect()
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
device["interfaces".to_string()]);
Vec::new()
}
}
}
pub fn main() {}
|
lookup
|
identifier_name
|
issue-2804.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
extern crate serialize;
use std::collections::HashMap;
use serialize::json::{mod, Json};
use std::option;
enum object {
bool_value(bool),
int_value(i64),
}
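// Returns the string stored under `key` in the JSON object, or `default` when the key
// is absent or its value is not a string.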
fn lookup(table: json::Object, key: String, default: String) -> String
{
match table.find(&key.to_string()) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
default
}
option::Option::None => {
default
}
}
}
fn add_interface(_store: int, managed_ip: String, data: json::Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
let name = lookup(interface.clone(),
"ifDescr".to_string(),
"".to_string());
let label = format!("{}-{}", managed_ip, name);
(label, object::bool_value(false))
}
_ => {
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
}
}
fn add_interfaces(store: int, managed_ip: String, device: HashMap<String, json::Json>)
-> Vec<(String, object)> {
match device["interfaces".to_string()]
{
Json::Array(ref interfaces) =>
{
interfaces.iter().map(|interface| {
add_interface(store, managed_ip.clone(), (*interface).clone())
}).collect()
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
device["interfaces".to_string()]);
Vec::new()
}
}
}
pub fn main()
|
{}
|
identifier_body
|
|
issue-2804.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
extern crate serialize;
use std::collections::HashMap;
use serialize::json::{mod, Json};
use std::option;
enum object {
bool_value(bool),
int_value(i64),
}
fn lookup(table: json::Object, key: String, default: String) -> String
{
match table.find(&key.to_string()) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
default
}
option::Option::None => {
default
}
}
}
fn add_interface(_store: int, managed_ip: String, data: json::Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
let name = lookup(interface.clone(),
"ifDescr".to_string(),
"".to_string());
let label = format!("{}-{}", managed_ip, name);
(label, object::bool_value(false))
}
_ =>
|
}
}
fn add_interfaces(store: int, managed_ip: String, device: HashMap<String, json::Json>)
-> Vec<(String, object)> {
match device["interfaces".to_string()]
{
Json::Array(ref interfaces) =>
{
interfaces.iter().map(|interface| {
add_interface(store, managed_ip.clone(), (*interface).clone())
}).collect()
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
device["interfaces".to_string()]);
Vec::new()
}
}
}
pub fn main() {}
|
{
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
|
conditional_block
|
issue-2804.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate collections;
extern crate serialize;
use std::collections::HashMap;
use serialize::json::{mod, Json};
|
}
fn lookup(table: json::Object, key: String, default: String) -> String
{
match table.find(&key.to_string()) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
default
}
option::Option::None => {
default
}
}
}
fn add_interface(_store: int, managed_ip: String, data: json::Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
let name = lookup(interface.clone(),
"ifDescr".to_string(),
"".to_string());
let label = format!("{}-{}", managed_ip, name);
(label, object::bool_value(false))
}
_ => {
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
}
}
fn add_interfaces(store: int, managed_ip: String, device: HashMap<String, json::Json>)
-> Vec<(String, object)> {
match device["interfaces".to_string()]
{
Json::Array(ref interfaces) =>
{
interfaces.iter().map(|interface| {
add_interface(store, managed_ip.clone(), (*interface).clone())
}).collect()
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
device["interfaces".to_string()]);
Vec::new()
}
}
}
pub fn main() {}
|
use std::option;
enum object {
bool_value(bool),
int_value(i64),
|
random_line_split
|
sdiv.rs
|
use num::Num;
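// Component-wise division of a 3x3 matrix (flat [T; 9]) by a scalar, writing into `out`;
// every entry falls back to zero when the scalar is zero to avoid dividing by zero.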
#[inline]
pub fn sdiv<'a, 'b, T: Copy + Num>(out: &'a mut [T; 9], a: &'b [T; 9], s: T) -> &'a mut [T; 9] {
    let not_zero = s != T::zero();
out[0] = if not_zero {a[0] / s} else {T::zero()};
out[1] = if not_zero {a[1] / s} else
|
;
out[2] = if not_zero {a[2] / s} else {T::zero()};
out[3] = if not_zero {a[3] / s} else {T::zero()};
out[4] = if not_zero {a[4] / s} else {T::zero()};
out[5] = if not_zero {a[5] / s} else {T::zero()};
out[6] = if not_zero {a[6] / s} else {T::zero()};
out[7] = if not_zero {a[7] / s} else {T::zero()};
out[8] = if not_zero {a[8] / s} else {T::zero()};
out
}
#[test]
fn test_sdiv() {
let mut v = [0, 0, 0, 0, 0, 0, 0, 0, 0];
sdiv(&mut v, &[1, 0, 0, 0, 1, 0, 0, 0, 1], 1);
assert!(v == [1, 0, 0, 0, 1, 0, 0, 0, 1]);
}
|
{T::zero()}
|
conditional_block
|
sdiv.rs
|
use num::Num;
#[inline]
|
out[1] = if not_zero {a[1] / s} else {T::zero()};
out[2] = if not_zero {a[2] / s} else {T::zero()};
out[3] = if not_zero {a[3] / s} else {T::zero()};
out[4] = if not_zero {a[4] / s} else {T::zero()};
out[5] = if not_zero {a[5] / s} else {T::zero()};
out[6] = if not_zero {a[6] / s} else {T::zero()};
out[7] = if not_zero {a[7] / s} else {T::zero()};
out[8] = if not_zero {a[8] / s} else {T::zero()};
out
}
#[test]
fn test_sdiv() {
let mut v = [0, 0, 0, 0, 0, 0, 0, 0, 0];
sdiv(&mut v, &[1, 0, 0, 0, 1, 0, 0, 0, 1], 1);
assert!(v == [1, 0, 0, 0, 1, 0, 0, 0, 1]);
}
|
pub fn sdiv<'a, 'b, T: Copy + Num>(out: &'a mut [T; 9], a: &'b [T; 9], s: T) -> &'a mut [T; 9] {
let not_zero = s != T::zero();
out[0] = if not_zero {a[0] / s} else {T::zero()};
|
random_line_split
|