file_name (large_string, lengths 4 to 69) | prefix (large_string, lengths 0 to 26.7k) | suffix (large_string, lengths 0 to 24.8k) | middle (large_string, lengths 0 to 2.12k) | fim_type (large_string, 4 values) |
---|---|---|---|---|
atom.rs | use pyo3::prelude::*;
use rayon::prelude::*;
use std::collections::HashMap;
use crate::becke_partitioning;
use crate::bragg;
use crate::bse;
use crate::lebedev;
use crate::radial;
#[pyfunction]
pub fn | (
basis_set: &str,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (alpha_min, alpha_max) =
bse::ang_min_and_max(basis_set, proton_charges[center_index] as usize);
atom_grid(
alpha_min,
alpha_max,
radial_precision,
min_num_angular_points,
max_num_angular_points,
proton_charges,
center_index,
center_coordinates_bohr,
hardness,
)
}
#[pyfunction]
pub fn atom_grid(
alpha_min: HashMap<usize, f64>,
alpha_max: f64,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (rs, weights_radial) = radial::radial_grid_lmg(
alpha_min,
alpha_max,
radial_precision,
proton_charges[center_index],
);
// factors match DIRAC code
let rb = bragg::get_bragg_angstrom(proton_charges[center_index]) / (5.0 * 0.529177249);
let mut coordinates = Vec::new();
let mut weights = Vec::new();
let pi = std::f64::consts::PI;
let cx = center_coordinates_bohr[center_index].0;
let cy = center_coordinates_bohr[center_index].1;
let cz = center_coordinates_bohr[center_index].2;
for (&r, &weight_radial) in rs.iter().zip(weights_radial.iter()) {
// we read the angular grid at each radial step because of pruning
// this can be optimized
let mut num_angular = max_num_angular_points;
if r < rb {
num_angular = ((max_num_angular_points as f64) * r / rb) as usize;
num_angular = lebedev::get_closest_num_angular(num_angular);
if num_angular < min_num_angular_points {
num_angular = min_num_angular_points;
}
}
let (coordinates_angular, weights_angular) = lebedev::angular_grid(num_angular);
let wt = 4.0 * pi * weight_radial;
for (&xyz, &weight_angular) in coordinates_angular.iter().zip(weights_angular.iter()) {
let x = cx + r * xyz.0;
let y = cy + r * xyz.1;
let z = cz + r * xyz.2;
coordinates.push((x, y, z));
weights.push(wt * weight_angular);
}
}
if center_coordinates_bohr.len() > 1 {
let w_partitioning: Vec<f64> = coordinates
.par_iter()
.map(|c| {
becke_partitioning::partitioning_weight(
center_index,
&center_coordinates_bohr,
&proton_charges,
*c,
hardness,
)
})
.collect();
for (i, w) in weights.iter_mut().enumerate() {
*w *= w_partitioning[i];
}
}
(coordinates, weights)
}
| atom_grid_bse | identifier_name |
atom.rs | use pyo3::prelude::*;
use rayon::prelude::*;
use std::collections::HashMap;
use crate::becke_partitioning;
use crate::bragg;
use crate::bse;
use crate::lebedev;
use crate::radial;
#[pyfunction]
pub fn atom_grid_bse(
basis_set: &str,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) |
#[pyfunction]
pub fn atom_grid(
alpha_min: HashMap<usize, f64>,
alpha_max: f64,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (rs, weights_radial) = radial::radial_grid_lmg(
alpha_min,
alpha_max,
radial_precision,
proton_charges[center_index],
);
// factors match DIRAC code
let rb = bragg::get_bragg_angstrom(proton_charges[center_index]) / (5.0 * 0.529177249);
let mut coordinates = Vec::new();
let mut weights = Vec::new();
let pi = std::f64::consts::PI;
let cx = center_coordinates_bohr[center_index].0;
let cy = center_coordinates_bohr[center_index].1;
let cz = center_coordinates_bohr[center_index].2;
for (&r, &weight_radial) in rs.iter().zip(weights_radial.iter()) {
// we read the angular grid at each radial step because of pruning
// this can be optimized
let mut num_angular = max_num_angular_points;
if r < rb {
num_angular = ((max_num_angular_points as f64) * r / rb) as usize;
num_angular = lebedev::get_closest_num_angular(num_angular);
if num_angular < min_num_angular_points {
num_angular = min_num_angular_points;
}
}
let (coordinates_angular, weights_angular) = lebedev::angular_grid(num_angular);
let wt = 4.0 * pi * weight_radial;
for (&xyz, &weight_angular) in coordinates_angular.iter().zip(weights_angular.iter()) {
let x = cx + r * xyz.0;
let y = cy + r * xyz.1;
let z = cz + r * xyz.2;
coordinates.push((x, y, z));
weights.push(wt * weight_angular);
}
}
if center_coordinates_bohr.len() > 1 {
let w_partitioning: Vec<f64> = coordinates
.par_iter()
.map(|c| {
becke_partitioning::partitioning_weight(
center_index,
&center_coordinates_bohr,
&proton_charges,
*c,
hardness,
)
})
.collect();
for (i, w) in weights.iter_mut().enumerate() {
*w *= w_partitioning[i];
}
}
(coordinates, weights)
}
| {
let (alpha_min, alpha_max) =
bse::ang_min_and_max(basis_set, proton_charges[center_index] as usize);
atom_grid(
alpha_min,
alpha_max,
radial_precision,
min_num_angular_points,
max_num_angular_points,
proton_charges,
center_index,
center_coordinates_bohr,
hardness,
)
} | identifier_body |
atom.rs | use pyo3::prelude::*;
use rayon::prelude::*;
use std::collections::HashMap;
use crate::becke_partitioning;
use crate::bragg;
use crate::bse;
use crate::lebedev;
use crate::radial;
#[pyfunction]
pub fn atom_grid_bse(
basis_set: &str,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (alpha_min, alpha_max) =
bse::ang_min_and_max(basis_set, proton_charges[center_index] as usize);
atom_grid(
alpha_min,
alpha_max,
radial_precision,
min_num_angular_points,
max_num_angular_points,
proton_charges,
center_index,
center_coordinates_bohr,
hardness,
)
}
#[pyfunction]
pub fn atom_grid(
alpha_min: HashMap<usize, f64>,
alpha_max: f64,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (rs, weights_radial) = radial::radial_grid_lmg(
alpha_min,
alpha_max,
radial_precision,
proton_charges[center_index],
);
// factors match DIRAC code
let rb = bragg::get_bragg_angstrom(proton_charges[center_index]) / (5.0 * 0.529177249);
let mut coordinates = Vec::new();
let mut weights = Vec::new();
let pi = std::f64::consts::PI;
let cx = center_coordinates_bohr[center_index].0;
let cy = center_coordinates_bohr[center_index].1;
let cz = center_coordinates_bohr[center_index].2;
for (&r, &weight_radial) in rs.iter().zip(weights_radial.iter()) {
// we read the angular grid at each radial step because of pruning
// this can be optimized
let mut num_angular = max_num_angular_points;
if r < rb {
num_angular = ((max_num_angular_points as f64) * r / rb) as usize;
num_angular = lebedev::get_closest_num_angular(num_angular);
if num_angular < min_num_angular_points {
num_angular = min_num_angular_points;
}
}
let (coordinates_angular, weights_angular) = lebedev::angular_grid(num_angular);
let wt = 4.0 * pi * weight_radial;
for (&xyz, &weight_angular) in coordinates_angular.iter().zip(weights_angular.iter()) {
let x = cx + r * xyz.0;
let y = cy + r * xyz.1;
let z = cz + r * xyz.2;
coordinates.push((x, y, z));
weights.push(wt * weight_angular);
}
}
if center_coordinates_bohr.len() > 1 |
(coordinates, weights)
}
| {
let w_partitioning: Vec<f64> = coordinates
.par_iter()
.map(|c| {
becke_partitioning::partitioning_weight(
center_index,
&center_coordinates_bohr,
&proton_charges,
*c,
hardness,
)
})
.collect();
for (i, w) in weights.iter_mut().enumerate() {
*w *= w_partitioning[i];
}
} | conditional_block |
atom.rs | use pyo3::prelude::*;
use rayon::prelude::*;
use std::collections::HashMap;
use crate::becke_partitioning;
use crate::bragg;
use crate::bse;
use crate::lebedev;
use crate::radial;
#[pyfunction]
pub fn atom_grid_bse(
basis_set: &str,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (alpha_min, alpha_max) =
bse::ang_min_and_max(basis_set, proton_charges[center_index] as usize);
atom_grid(
alpha_min,
alpha_max,
radial_precision,
min_num_angular_points,
max_num_angular_points,
proton_charges,
center_index,
center_coordinates_bohr,
hardness,
)
}
#[pyfunction]
pub fn atom_grid(
alpha_min: HashMap<usize, f64>,
alpha_max: f64,
radial_precision: f64,
min_num_angular_points: usize,
max_num_angular_points: usize,
proton_charges: Vec<i32>,
center_index: usize,
center_coordinates_bohr: Vec<(f64, f64, f64)>,
hardness: usize,
) -> (Vec<(f64, f64, f64)>, Vec<f64>) {
let (rs, weights_radial) = radial::radial_grid_lmg(
alpha_min,
alpha_max,
radial_precision,
proton_charges[center_index],
);
// factors match DIRAC code
let rb = bragg::get_bragg_angstrom(proton_charges[center_index]) / (5.0 * 0.529177249);
let mut coordinates = Vec::new();
let mut weights = Vec::new();
let pi = std::f64::consts::PI;
let cx = center_coordinates_bohr[center_index].0;
let cy = center_coordinates_bohr[center_index].1;
let cz = center_coordinates_bohr[center_index].2;
for (&r, &weight_radial) in rs.iter().zip(weights_radial.iter()) {
// we read the angular grid at each radial step because of pruning
// this can be optimized
let mut num_angular = max_num_angular_points; | num_angular = lebedev::get_closest_num_angular(num_angular);
if num_angular < min_num_angular_points {
num_angular = min_num_angular_points;
}
}
let (coordinates_angular, weights_angular) = lebedev::angular_grid(num_angular);
let wt = 4.0 * pi * weight_radial;
for (&xyz, &weight_angular) in coordinates_angular.iter().zip(weights_angular.iter()) {
let x = cx + r * xyz.0;
let y = cy + r * xyz.1;
let z = cz + r * xyz.2;
coordinates.push((x, y, z));
weights.push(wt * weight_angular);
}
}
if center_coordinates_bohr.len() > 1 {
let w_partitioning: Vec<f64> = coordinates
.par_iter()
.map(|c| {
becke_partitioning::partitioning_weight(
center_index,
&center_coordinates_bohr,
&proton_charges,
*c,
hardness,
)
})
.collect();
for (i, w) in weights.iter_mut().enumerate() {
*w *= w_partitioning[i];
}
}
(coordinates, weights)
} | if r < rb {
num_angular = ((max_num_angular_points as f64) * r / rb) as usize; | random_line_split |
watchdog.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Dawid Ciężarkiewcz <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Watchdog for Kinetis SIM module.
use util::support::nop;
#[path="../../util/ioreg.rs"] mod ioreg;
/// Watchdog state
#[allow(missing_docs)]
#[derive(Clone, Copy)]
pub enum State {
Disabled,
Enabled,
}
/// Init watchdog
pub fn init(state : State) {
| fn unlock() {
use self::reg::WDOG_unlock_unlock::*;
reg::WDOG.unlock.set_unlock(UnlockSeq1);
reg::WDOG.unlock.set_unlock(UnlockSeq2);
// Enforce one cycle delay
nop();
}
/// Write refresh sequence to refresh watchdog
pub fn refresh() {
use self::reg::WDOG_refresh_refresh::*;
reg::WDOG.refresh.set_refresh(RefreshSeq1);
reg::WDOG.refresh.set_refresh(RefreshSeq2);
}
#[allow(dead_code)]
mod reg {
use volatile_cell::VolatileCell;
use core::ops::Drop;
ioregs!(WDOG = {
/// Status and Control Register High
0x0 => reg16 stctrlh
{
0 => en, //= Watchdog enable
4 => allowupdate //= Enables updates to watchdog write-once registers,
//= after the reset-triggered initial configuration window
},
/// Refresh Register
0xc => reg16 refresh {
0..15 => refresh: wo
{
0xa602 => RefreshSeq1,
0xb480 => RefreshSeq2,
},
},
/// Unlock Register
0xe => reg16 unlock {
0..15 => unlock: wo
{
0xc520 => UnlockSeq1,
0xd928 => UnlockSeq2,
},
},
});
extern {
#[link_name="k20_iomem_WDOG"] pub static WDOG: WDOG;
}
}
| use self::State::*;
unlock();
match state {
Disabled => {
reg::WDOG.stctrlh.set_en(false);
},
Enabled => {
reg::WDOG.stctrlh.set_allowupdate(true);
},
}
}
| identifier_body |
watchdog.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Dawid Ciężarkiewcz <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software | // limitations under the License.
//! Watchdog for Kinetis SIM module.
use util::support::nop;
#[path="../../util/ioreg.rs"] mod ioreg;
/// Watchdog state
#[allow(missing_docs)]
#[derive(Clone, Copy)]
pub enum State {
Disabled,
Enabled,
}
/// Init watchdog
pub fn init(state : State) {
use self::State::*;
unlock();
match state {
Disabled => {
reg::WDOG.stctrlh.set_en(false);
},
Enabled => {
reg::WDOG.stctrlh.set_allowupdate(true);
},
}
}
fn unlock() {
use self::reg::WDOG_unlock_unlock::*;
reg::WDOG.unlock.set_unlock(UnlockSeq1);
reg::WDOG.unlock.set_unlock(UnlockSeq2);
// Enforce one cycle delay
nop();
}
/// Write refresh sequence to refresh watchdog
pub fn refresh() {
use self::reg::WDOG_refresh_refresh::*;
reg::WDOG.refresh.set_refresh(RefreshSeq1);
reg::WDOG.refresh.set_refresh(RefreshSeq2);
}
#[allow(dead_code)]
mod reg {
use volatile_cell::VolatileCell;
use core::ops::Drop;
ioregs!(WDOG = {
/// Status and Control Register High
0x0 => reg16 stctrlh
{
0 => en, //= Watchdog enable
4 => allowupdate //= Enables updates to watchdog write-once registers,
//= after the reset-triggered initial configuration window
},
/// Refresh Register
0xc => reg16 refresh {
0..15 => refresh: wo
{
0xa602 => RefreshSeq1,
0xb480 => RefreshSeq2,
},
},
/// Unlock Register
0xe => reg16 unlock {
0..15 => unlock: wo
{
0xc520 => UnlockSeq1,
0xd928 => UnlockSeq2,
},
},
});
extern {
#[link_name="k20_iomem_WDOG"] pub static WDOG: WDOG;
}
} | // distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and | random_line_split |
watchdog.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Dawid Ciężarkiewcz <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Watchdog for Kinetis SIM module.
use util::support::nop;
#[path="../../util/ioreg.rs"] mod ioreg;
/// Watchdog state
#[allow(missing_docs)]
#[derive(Clone, Copy)]
pub enum State {
Disabled,
Enabled,
}
/// Init watchdog
pub fn init(state : State) {
use self::State::*;
unlock();
match state {
Disabled => {
reg::WDOG.stctrlh.set_en(false);
},
Enabled => {
reg::WDOG.stctrlh.set_allowupdate(true);
},
}
}
fn unlock() {
use self::reg::WDOG_unlock_unlock::*;
reg::WDOG.unlock.set_unlock(UnlockSeq1);
reg::WDOG.unlock.set_unlock(UnlockSeq2);
// Enforce one cycle delay
nop();
}
/// Write refresh sequence to refresh watchdog
pub fn re | {
use self::reg::WDOG_refresh_refresh::*;
reg::WDOG.refresh.set_refresh(RefreshSeq1);
reg::WDOG.refresh.set_refresh(RefreshSeq2);
}
#[allow(dead_code)]
mod reg {
use volatile_cell::VolatileCell;
use core::ops::Drop;
ioregs!(WDOG = {
/// Status and Control Register High
0x0 => reg16 stctrlh
{
0 => en, //= Watchdog enable
4 => allowupdate //= Enables updates to watchdog write-once registers,
//= after the reset-triggered initial configuration window
},
/// Refresh Register
0xc => reg16 refresh {
0..15 => refresh: wo
{
0xa602 => RefreshSeq1,
0xb480 => RefreshSeq2,
},
},
/// Unlock Register
0xe => reg16 unlock {
0..15 => unlock: wo
{
0xc520 => UnlockSeq1,
0xd928 => UnlockSeq2,
},
},
});
extern {
#[link_name="k20_iomem_WDOG"] pub static WDOG: WDOG;
}
}
| fresh() | identifier_name |
reversi.rs |
use std::io::{Write, BufWriter};
static BOARD_SIZE: usize = 10; // board squares + 2 (sentinel border)
#[derive(Debug, Clone)]
pub enum PieceType {
Black,
White,
Sentinel,
Null,
}
fn flip_turn(t: PieceType) -> PieceType {
match t {
PieceType::Black => PieceType::White,
PieceType::White => PieceType::Black,
_ => PieceType::Null,
}
}
impl PartialEq for PieceType {
fn eq(&self, t: &PieceType) -> bool {
self == t
}
}
#[derive(Debug, Clone)]
pub struct Reversi {
board: Vec<Vec<PieceType>>,
pub turn: PieceType,
}
impl Reversi {
pub fn new() -> Self {
let mut v: Vec<Vec<PieceType>> = Vec::new();
let half = (BOARD_SIZE - 2) / 2;
for y in 0..BOARD_SIZE {
v.push(Vec::new());
for x in 0..BOARD_SIZE {
if y == 0 || x == 0 ||
y == BOARD_SIZE - 1 || x == BOARD_SIZE - 1 {
v[y].push(PieceType::Sentinel);
} else if y == half && x == half ||
y == half + 1 && x == half + 1 {
v[y | == half && x == half + 1 ||
y == half + 1 && x == half {
v[y].push(PieceType::Black);
} else {
v[y].push(PieceType::Null);
}
}
}
Reversi {
board: v,
turn: PieceType::Black,
}
}
pub fn debug_board(&self) {
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
for y in &self.board {
write!(w, "|").expect("stdout Error");
for x in y {
write!(w, "{}|", match *x {
PieceType::Black => "x",
PieceType::White => "o",
PieceType::Null => " ",
PieceType::Sentinel => "=",
//_ => "",
}).expect("stdout Error");
}
writeln!(w, "").expect("stdout Error");
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
}
}
pub fn show_turn(&self) {
println!("turn: {}", match self.turn {
PieceType::Black => "Black",
PieceType::White => "White",
_ => "Something wrong.",
})
}
pub fn turn_change(self) -> Self {
Reversi {
board: self.board,
turn: flip_turn(self.turn),
}
}
pub fn show_score(&self) {
let mut black = 0;
let mut white = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::White => white += 1,
PieceType::Black => black += 1,
_ => {},
};
}
}
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "| Score |").expect("stdout Error");
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "|Black|{:03}|", black).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
writeln!(w, "|White|{:03}|", white).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
}
/*
fn is_placeable(&self, x: usize, y: usize, r: usize) -> bool {
let nt = flip_turn(self.turn.clone());
match self.board[y][x] {
PieceType::Null => {},
_ => return false,
};
let (iy, ix) = match r {
0 => (-1, -1), 1 => (-1, 0), 2 => (-1, 1),
3 => (0, -1), 4 => (0, 0), 5 => (0, 1),
6 => (1, -1), 7 => (1, 0), 8 => (1, 1),
_ => (0, 0),
};
let mut b = false;
let mut _y = y as isize;
let mut _x = x as isize;
loop {
_y += iy;
_x += ix;
if _y < 0 || _x < 0 {
return false;
}
if self.board[_y as usize][_x as usize] == nt {
b = true;
} else if b && self.board[_y as usize][_x as usize] == self.turn {
return true;
} else {
return false;
}
}
}
pub fn put(self, x: usize, y: usize) -> Result<Self> {
}
*/
pub fn continue_game(&self) -> bool {
let mut null = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::Null => null += 1,
_ => {},
}
}
}
if null == 0 {
return false;
}
true
}
}
| ].push(PieceType::White);
} else if y | conditional_block |
reversi.rs |
use std::io::{Write, BufWriter};
static BOARD_SIZE: usize = 10; // board squares + 2 (sentinel border)
#[derive(Debug, Clone)]
pub enum PieceType {
Black,
White,
Sentinel,
Null,
}
fn flip_turn(t: PieceType) -> PieceType {
match t {
PieceType::Black => PieceType::White,
PieceType::White => PieceType::Black,
_ => PieceType::Null,
}
}
impl PartialEq for PieceType {
fn eq(&self, t: &PieceType) -> bool {
self = | ve(Debug, Clone)]
pub struct Reversi {
board: Vec<Vec<PieceType>>,
pub turn: PieceType,
}
impl Reversi {
pub fn new() -> Self {
let mut v: Vec<Vec<PieceType>> = Vec::new();
let half = (BOARD_SIZE - 2) / 2;
for y in 0..BOARD_SIZE {
v.push(Vec::new());
for x in 0..BOARD_SIZE {
if y == 0 || x == 0 ||
y == BOARD_SIZE - 1 || x == BOARD_SIZE - 1 {
v[y].push(PieceType::Sentinel);
} else if y == half && x == half ||
y == half + 1 && x == half + 1 {
v[y].push(PieceType::White);
} else if y == half && x == half + 1 ||
y == half + 1 && x == half {
v[y].push(PieceType::Black);
} else {
v[y].push(PieceType::Null);
}
}
}
Reversi {
board: v,
turn: PieceType::Black,
}
}
pub fn debug_board(&self) {
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
for y in &self.board {
write!(w, "|").expect("stdout Error");
for x in y {
write!(w, "{}|", match *x {
PieceType::Black => "x",
PieceType::White => "o",
PieceType::Null => " ",
PieceType::Sentinel => "=",
//_ => "",
}).expect("stdout Error");
}
writeln!(w, "").expect("stdout Error");
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
}
}
pub fn show_turn(&self) {
println!("turn: {}", match self.turn {
PieceType::Black => "Black",
PieceType::White => "White",
_ => "Something wrong.",
})
}
pub fn turn_change(self) -> Self {
Reversi {
board: self.board,
turn: flip_turn(self.turn),
}
}
pub fn show_score(&self) {
let mut black = 0;
let mut white = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::White => white += 1,
PieceType::Black => black += 1,
_ => {},
};
}
}
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "| Score |").expect("stdout Error");
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "|Black|{:03}|", black).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
writeln!(w, "|White|{:03}|", white).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
}
/*
fn is_placeable(&self, x: usize, y: usize, r: usize) -> bool {
let nt = flip_turn(self.turn.clone());
match self.board[y][x] {
PieceType::Null => {},
_ => return false,
};
let (iy, ix) = match r {
0 => (-1, -1), 1 => (-1, 0), 2 => (-1, 1),
3 => (0, -1), 4 => (0, 0), 5 => (0, 1),
6 => (1, -1), 7 => (1, 0), 8 => (1, 1),
_ => (0, 0),
};
let mut b = false;
let mut _y = y as isize;
let mut _x = x as isize;
loop {
_y += iy;
_x += ix;
if _y < 0 || _x < 0 {
return false;
}
if self.board[_y as usize][_x as usize] == nt {
b = true;
} else if b && self.board[_y as usize][_x as usize] == self.turn {
return true;
} else {
return false;
}
}
}
pub fn put(self, x: usize, y: usize) -> Result<Self> {
}
*/
pub fn continue_game(&self) -> bool {
let mut null = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::Null => null += 1,
_ => {},
}
}
}
if null == 0 {
return false;
}
true
}
}
| = t
}
}
#[deri | identifier_body |
reversi.rs |
use std::io::{Write, BufWriter};
static BOARD_SIZE: usize = 10; // board squares + 2 (sentinel border)
#[derive(Debug, Clone)]
pub enum PieceType {
Black,
White,
Sentinel,
Null,
}
fn flip_turn(t: PieceType) -> PieceType {
match t {
PieceType::Black => PieceType::White,
PieceType::White => PieceType::Black,
_ => PieceType::Null,
}
}
impl PartialEq for PieceType {
fn eq(&self, t: &PieceType) -> bool {
self == t
}
}
#[derive(Debug, Clone)]
pub struct Reversi {
| Vec<Vec<PieceType>>,
pub turn: PieceType,
}
impl Reversi {
pub fn new() -> Self {
let mut v: Vec<Vec<PieceType>> = Vec::new();
let half = (BOARD_SIZE - 2) / 2;
for y in 0..BOARD_SIZE {
v.push(Vec::new());
for x in 0..BOARD_SIZE {
if y == 0 || x == 0 ||
y == BOARD_SIZE - 1 || x == BOARD_SIZE - 1 {
v[y].push(PieceType::Sentinel);
} else if y == half && x == half ||
y == half + 1 && x == half + 1 {
v[y].push(PieceType::White);
} else if y == half && x == half + 1 ||
y == half + 1 && x == half {
v[y].push(PieceType::Black);
} else {
v[y].push(PieceType::Null);
}
}
}
Reversi {
board: v,
turn: PieceType::Black,
}
}
pub fn debug_board(&self) {
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
for y in &self.board {
write!(w, "|").expect("stdout Error");
for x in y {
write!(w, "{}|", match *x {
PieceType::Black => "x",
PieceType::White => "o",
PieceType::Null => " ",
PieceType::Sentinel => "=",
//_ => "",
}).expect("stdout Error");
}
writeln!(w, "").expect("stdout Error");
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
}
}
pub fn show_turn(&self) {
println!("turn: {}", match self.turn {
PieceType::Black => "Black",
PieceType::White => "White",
_ => "Something wrong.",
})
}
pub fn turn_change(self) -> Self {
Reversi {
board: self.board,
turn: flip_turn(self.turn),
}
}
pub fn show_score(&self) {
let mut black = 0;
let mut white = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::White => white += 1,
PieceType::Black => black += 1,
_ => {},
};
}
}
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "| Score |").expect("stdout Error");
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "|Black|{:03}|", black).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
writeln!(w, "|White|{:03}|", white).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
}
/*
fn is_placeable(&self, x: usize, y: usize, r: usize) -> bool {
let nt = flip_turn(self.turn.clone());
match self.board[y][x] {
PieceType::Null => {},
_ => return false,
};
let (iy, ix) = match r {
0 => (-1, -1), 1 => (-1, 0), 2 => (-1, 1),
3 => (0, -1), 4 => (0, 0), 5 => (0, 1),
6 => (1, -1), 7 => (1, 0), 8 => (1, 1),
_ => (0, 0),
};
let mut b = false;
let mut _y = y as isize;
let mut _x = x as isize;
loop {
_y += iy;
_x += ix;
if _y < 0 || _x < 0 {
return false;
}
if self.board[_y as usize][_x as usize] == nt {
b = true;
} else if b && self.board[_y as usize][_x as usize] == self.turn {
return true;
} else {
return false;
}
}
}
pub fn put(self, x: usize, y: usize) -> Result<Self> {
}
*/
pub fn continue_game(&self) -> bool {
let mut null = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::Null => null += 1,
_ => {},
}
}
}
if null == 0 {
return false;
}
true
}
}
| board: | identifier_name |
reversi.rs | use std::io::{Write, BufWriter};
static BOARD_SIZE: usize = 10; // board squares + 2 (sentinel border)
#[derive(Debug, Clone)]
pub enum PieceType {
Black,
White,
Sentinel,
Null,
}
fn flip_turn(t: PieceType) -> PieceType {
match t {
PieceType::Black => PieceType::White,
PieceType::White => PieceType::Black,
_ => PieceType::Null,
}
}
|
#[derive(Debug, Clone)]
pub struct Reversi {
board: Vec<Vec<PieceType>>,
pub turn: PieceType,
}
impl Reversi {
pub fn new() -> Self {
let mut v: Vec<Vec<PieceType>> = Vec::new();
let half = (BOARD_SIZE - 2) / 2;
for y in 0..BOARD_SIZE {
v.push(Vec::new());
for x in 0..BOARD_SIZE {
if y == 0 || x == 0 ||
y == BOARD_SIZE - 1 || x == BOARD_SIZE - 1 {
v[y].push(PieceType::Sentinel);
} else if y == half && x == half ||
y == half + 1 && x == half + 1 {
v[y].push(PieceType::White);
} else if y == half && x == half + 1 ||
y == half + 1 && x == half {
v[y].push(PieceType::Black);
} else {
v[y].push(PieceType::Null);
}
}
}
Reversi {
board: v,
turn: PieceType::Black,
}
}
pub fn debug_board(&self) {
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
for y in &self.board {
write!(w, "|").expect("stdout Error");
for x in y {
write!(w, "{}|", match *x {
PieceType::Black => "x",
PieceType::White => "o",
PieceType::Null => " ",
PieceType::Sentinel => "=",
//_ => "",
}).expect("stdout Error");
}
writeln!(w, "").expect("stdout Error");
for _i in 0..BOARD_SIZE {
write!(w, "+-").expect("stdout Error");
}
writeln!(w, "+").expect("stdout Error");
}
}
pub fn show_turn(&self) {
println!("turn: {}", match self.turn {
PieceType::Black => "Black",
PieceType::White => "White",
_ => "Something wrong.",
})
}
pub fn turn_change(self) -> Self {
Reversi {
board: self.board,
turn: flip_turn(self.turn),
}
}
pub fn show_score(&self) {
let mut black = 0;
let mut white = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::White => white += 1,
PieceType::Black => black += 1,
_ => {},
};
}
}
let w = super::std::io::stdout();
let mut w = BufWriter::new(w.lock());
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "| Score |").expect("stdout Error");
writeln!(w, "+---------+").expect("stdout Error");
writeln!(w, "|Black|{:03}|", black).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
writeln!(w, "|White|{:03}|", white).expect("stdout Error");
writeln!(w, "+-----+---+").expect("stdout Error");
}
/*
fn is_placeable(&self, x: usize, y: usize, r: usize) -> bool {
let nt = flip_turn(self.turn.clone());
match self.board[y][x] {
PieceType::Null => {},
_ => return false,
};
let (iy, ix) = match r {
0 => (-1, -1), 1 => (-1, 0), 2 => (-1, 1),
3 => (0, -1), 4 => (0, 0), 5 => (0, 1),
6 => (1, -1), 7 => (1, 0), 8 => (1, 1),
_ => (0, 0),
};
let mut b = false;
let mut _y = y as isize;
let mut _x = x as isize;
loop {
_y += iy;
_x += ix;
if _y < 0 || _x < 0 {
return false;
}
if self.board[_y as usize][_x as usize] == nt {
b = true;
} else if b && self.board[_y as usize][_x as usize] == self.turn {
return true;
} else {
return false;
}
}
}
pub fn put(self, x: usize, y: usize) -> Result<Self> {
}
*/
pub fn continue_game(&self) -> bool {
let mut null = 0;
for y in &self.board {
for x in y {
match *x {
PieceType::Null => null += 1,
_ => {},
}
}
}
if null == 0 {
return false;
}
true
}
} | impl PartialEq for PieceType {
fn eq(&self, t: &PieceType) -> bool {
self == t
}
} | random_line_split |
tokens.rs | //* This file is part of the uutils coreutils package.
//*
//* (c) Roman Gafiyatullin <[email protected]>
//*
//* For the full copyright and license information, please view the LICENSE
//* file that was distributed with this source code.
//!
//! The following tokens are present in the expr grammar:
//! * integer literal;
//! * string literal;
//! * infix binary operators;
//! * prefix operators.
//!
//! According to the man-page of expr we have expression split into tokens (each token -- separate CLI-argument).
//! Hence all we need is to map the strings into the Token structures, except for some ugly fiddling with +-escaping.
//!
// spell-checker:ignore (ToDO) paren
use num_bigint::BigInt;
#[derive(Debug, Clone)]
pub enum Token {
Value {
value: String,
},
ParOpen,
ParClose,
InfixOp {
precedence: u8,
left_assoc: bool,
value: String,
},
PrefixOp {
arity: usize,
value: String,
},
}
impl Token {
fn new_infix_op(v: &str, left_assoc: bool, precedence: u8) -> Self {
Self::InfixOp {
left_assoc,
precedence,
value: v.into(),
}
}
fn new_value(v: &str) -> Self {
Self::Value { value: v.into() }
}
fn is_infix_plus(&self) -> bool {
match self {
Self::InfixOp { value,.. } => value == "+",
_ => false,
}
}
fn is_a_number(&self) -> bool {
match self {
Self::Value { value,.. } => value.parse::<BigInt>().is_ok(),
_ => false,
}
}
fn is_a_close_paren(&self) -> bool {
matches!(*self, Token::ParClose)
}
}
pub fn | (strings: &[String]) -> Result<Vec<(usize, Token)>, String> {
let mut tokens_acc = Vec::with_capacity(strings.len());
let mut tok_idx = 1;
for s in strings {
let token_if_not_escaped = match s.as_ref() {
"(" => Token::ParOpen,
")" => Token::ParClose,
"^" => Token::new_infix_op(s, false, 7),
":" => Token::new_infix_op(s, true, 6),
"*" | "/" | "%" => Token::new_infix_op(s, true, 5),
"+" | "-" => Token::new_infix_op(s, true, 4),
"=" | "!=" | "<" | ">" | "<=" | ">=" => Token::new_infix_op(s, true, 3),
"&" => Token::new_infix_op(s, true, 2),
"|" => Token::new_infix_op(s, true, 1),
"match" | "index" => Token::PrefixOp {
arity: 2,
value: s.clone(),
},
"substr" => Token::PrefixOp {
arity: 3,
value: s.clone(),
},
"length" => Token::PrefixOp {
arity: 1,
value: s.clone(),
},
_ => Token::new_value(s),
};
push_token_if_not_escaped(&mut tokens_acc, tok_idx, token_if_not_escaped, s);
tok_idx += 1;
}
maybe_dump_tokens_acc(&tokens_acc);
Ok(tokens_acc)
}
fn maybe_dump_tokens_acc(tokens_acc: &[(usize, Token)]) {
use std::env;
if let Ok(debug_var) = env::var("EXPR_DEBUG_TOKENS") {
if debug_var == "1" {
println!("EXPR_DEBUG_TOKENS");
for token in tokens_acc {
println!("\t{:?}", token);
}
}
}
}
fn push_token_if_not_escaped(acc: &mut Vec<(usize, Token)>, tok_idx: usize, token: Token, s: &str) {
// Smells heuristics... :(
let prev_is_plus = match acc.last() {
None => false,
Some(t) => t.1.is_infix_plus(),
};
let should_use_as_escaped = if prev_is_plus && acc.len() >= 2 {
let pre_prev = &acc[acc.len() - 2];
!(pre_prev.1.is_a_number() || pre_prev.1.is_a_close_paren())
} else {
prev_is_plus
};
if should_use_as_escaped {
acc.pop();
acc.push((tok_idx, Token::new_value(s)));
} else {
acc.push((tok_idx, token));
}
}
| strings_to_tokens | identifier_name |
tokens.rs | //* This file is part of the uutils coreutils package.
//*
//* (c) Roman Gafiyatullin <[email protected]>
//*
//* For the full copyright and license information, please view the LICENSE
//* file that was distributed with this source code.
//!
//! The following tokens are present in the expr grammar:
//! * integer literal;
//! * string literal;
//! * infix binary operators;
//! * prefix operators.
//!
//! According to the man-page of expr we have expression split into tokens (each token -- separate CLI-argument).
//! Hence all we need is to map the strings into the Token structures, except for some ugly fiddling with +-escaping.
//!
// spell-checker:ignore (ToDO) paren
use num_bigint::BigInt;
#[derive(Debug, Clone)]
pub enum Token {
Value {
value: String,
},
ParOpen,
ParClose,
InfixOp {
precedence: u8,
left_assoc: bool,
value: String,
},
PrefixOp {
arity: usize,
value: String,
},
}
impl Token {
fn new_infix_op(v: &str, left_assoc: bool, precedence: u8) -> Self {
Self::InfixOp {
left_assoc,
precedence,
value: v.into(),
}
}
fn new_value(v: &str) -> Self {
Self::Value { value: v.into() }
}
fn is_infix_plus(&self) -> bool {
match self {
Self::InfixOp { value,.. } => value == "+",
_ => false,
}
}
fn is_a_number(&self) -> bool {
match self {
Self::Value { value,.. } => value.parse::<BigInt>().is_ok(),
_ => false,
}
}
fn is_a_close_paren(&self) -> bool {
matches!(*self, Token::ParClose)
}
}
pub fn strings_to_tokens(strings: &[String]) -> Result<Vec<(usize, Token)>, String> {
let mut tokens_acc = Vec::with_capacity(strings.len());
let mut tok_idx = 1;
for s in strings {
let token_if_not_escaped = match s.as_ref() {
"(" => Token::ParOpen,
")" => Token::ParClose,
"^" => Token::new_infix_op(s, false, 7),
":" => Token::new_infix_op(s, true, 6),
"*" | "/" | "%" => Token::new_infix_op(s, true, 5),
"+" | "-" => Token::new_infix_op(s, true, 4),
"=" | "!=" | "<" | ">" | "<=" | ">=" => Token::new_infix_op(s, true, 3),
"&" => Token::new_infix_op(s, true, 2),
"|" => Token::new_infix_op(s, true, 1),
"match" | "index" => Token::PrefixOp {
arity: 2,
value: s.clone(),
},
"substr" => Token::PrefixOp {
arity: 3,
value: s.clone(),
},
"length" => Token::PrefixOp {
arity: 1,
value: s.clone(),
},
_ => Token::new_value(s),
};
push_token_if_not_escaped(&mut tokens_acc, tok_idx, token_if_not_escaped, s);
tok_idx += 1;
}
maybe_dump_tokens_acc(&tokens_acc);
Ok(tokens_acc)
}
fn maybe_dump_tokens_acc(tokens_acc: &[(usize, Token)]) {
use std::env;
if let Ok(debug_var) = env::var("EXPR_DEBUG_TOKENS") {
if debug_var == "1" {
println!("EXPR_DEBUG_TOKENS");
for token in tokens_acc {
println!("\t{:?}", token);
}
}
}
}
fn push_token_if_not_escaped(acc: &mut Vec<(usize, Token)>, tok_idx: usize, token: Token, s: &str) {
// Smells heuristics... :(
let prev_is_plus = match acc.last() {
None => false,
Some(t) => t.1.is_infix_plus(),
};
let should_use_as_escaped = if prev_is_plus && acc.len() >= 2 {
let pre_prev = &acc[acc.len() - 2];
!(pre_prev.1.is_a_number() || pre_prev.1.is_a_close_paren())
} else {
prev_is_plus
};
if should_use_as_escaped | else {
acc.push((tok_idx, token));
}
}
| {
acc.pop();
acc.push((tok_idx, Token::new_value(s)));
} | conditional_block |
tokens.rs | //* This file is part of the uutils coreutils package.
//*
//* (c) Roman Gafiyatullin <[email protected]>
//*
//* For the full copyright and license information, please view the LICENSE
//* file that was distributed with this source code.
//!
//! The following tokens are present in the expr grammar:
//! * integer literal;
//! * string literal;
//! * infix binary operators;
//! * prefix operators.
//!
//! According to the man-page of expr we have expression split into tokens (each token -- separate CLI-argument).
//! Hence all we need is to map the strings into the Token structures, except for some ugly fiddling with +-escaping.
//!
// spell-checker:ignore (ToDO) paren
use num_bigint::BigInt;
#[derive(Debug, Clone)]
pub enum Token {
Value {
value: String,
},
ParOpen,
ParClose,
InfixOp {
precedence: u8,
left_assoc: bool,
value: String,
},
PrefixOp {
arity: usize,
value: String,
},
}
impl Token {
fn new_infix_op(v: &str, left_assoc: bool, precedence: u8) -> Self {
Self::InfixOp {
left_assoc,
precedence,
value: v.into(),
}
}
fn new_value(v: &str) -> Self {
Self::Value { value: v.into() }
}
fn is_infix_plus(&self) -> bool {
match self {
Self::InfixOp { value,.. } => value == "+",
_ => false,
}
}
fn is_a_number(&self) -> bool {
match self {
Self::Value { value,.. } => value.parse::<BigInt>().is_ok(),
_ => false,
}
}
fn is_a_close_paren(&self) -> bool |
}
pub fn strings_to_tokens(strings: &[String]) -> Result<Vec<(usize, Token)>, String> {
let mut tokens_acc = Vec::with_capacity(strings.len());
let mut tok_idx = 1;
for s in strings {
let token_if_not_escaped = match s.as_ref() {
"(" => Token::ParOpen,
")" => Token::ParClose,
"^" => Token::new_infix_op(s, false, 7),
":" => Token::new_infix_op(s, true, 6),
"*" | "/" | "%" => Token::new_infix_op(s, true, 5),
"+" | "-" => Token::new_infix_op(s, true, 4),
"=" | "!=" | "<" | ">" | "<=" | ">=" => Token::new_infix_op(s, true, 3),
"&" => Token::new_infix_op(s, true, 2),
"|" => Token::new_infix_op(s, true, 1),
"match" | "index" => Token::PrefixOp {
arity: 2,
value: s.clone(),
},
"substr" => Token::PrefixOp {
arity: 3,
value: s.clone(),
},
"length" => Token::PrefixOp {
arity: 1,
value: s.clone(),
},
_ => Token::new_value(s),
};
push_token_if_not_escaped(&mut tokens_acc, tok_idx, token_if_not_escaped, s);
tok_idx += 1;
}
maybe_dump_tokens_acc(&tokens_acc);
Ok(tokens_acc)
}
fn maybe_dump_tokens_acc(tokens_acc: &[(usize, Token)]) {
use std::env;
if let Ok(debug_var) = env::var("EXPR_DEBUG_TOKENS") {
if debug_var == "1" {
println!("EXPR_DEBUG_TOKENS");
for token in tokens_acc {
println!("\t{:?}", token);
}
}
}
}
fn push_token_if_not_escaped(acc: &mut Vec<(usize, Token)>, tok_idx: usize, token: Token, s: &str) {
// Smells heuristics... :(
let prev_is_plus = match acc.last() {
None => false,
Some(t) => t.1.is_infix_plus(),
};
let should_use_as_escaped = if prev_is_plus && acc.len() >= 2 {
let pre_prev = &acc[acc.len() - 2];
!(pre_prev.1.is_a_number() || pre_prev.1.is_a_close_paren())
} else {
prev_is_plus
};
if should_use_as_escaped {
acc.pop();
acc.push((tok_idx, Token::new_value(s)));
} else {
acc.push((tok_idx, token));
}
}
| {
matches!(*self, Token::ParClose)
} | identifier_body |
tokens.rs | //* This file is part of the uutils coreutils package.
//*
//* (c) Roman Gafiyatullin <[email protected]>
//*
//* For the full copyright and license information, please view the LICENSE
//* file that was distributed with this source code.
//!
//! The following tokens are present in the expr grammar:
//! * integer literal;
//! * string literal;
//! * infix binary operators;
//! * prefix operators.
//!
//! According to the man-page of expr we have expression split into tokens (each token -- separate CLI-argument).
//! Hence all we need is to map the strings into the Token structures, except for some ugly fiddling with +-escaping.
//!
// spell-checker:ignore (ToDO) paren
use num_bigint::BigInt;
#[derive(Debug, Clone)]
pub enum Token {
Value {
value: String,
},
ParOpen,
ParClose,
InfixOp {
precedence: u8,
left_assoc: bool,
value: String,
},
PrefixOp {
arity: usize,
value: String,
},
}
impl Token {
fn new_infix_op(v: &str, left_assoc: bool, precedence: u8) -> Self {
Self::InfixOp {
left_assoc,
precedence,
value: v.into(),
}
}
fn new_value(v: &str) -> Self {
Self::Value { value: v.into() }
}
fn is_infix_plus(&self) -> bool {
match self {
Self::InfixOp { value,.. } => value == "+",
_ => false,
}
}
fn is_a_number(&self) -> bool {
match self {
Self::Value { value,.. } => value.parse::<BigInt>().is_ok(),
_ => false,
}
}
fn is_a_close_paren(&self) -> bool {
matches!(*self, Token::ParClose)
}
}
pub fn strings_to_tokens(strings: &[String]) -> Result<Vec<(usize, Token)>, String> {
let mut tokens_acc = Vec::with_capacity(strings.len());
let mut tok_idx = 1;
for s in strings {
let token_if_not_escaped = match s.as_ref() {
"(" => Token::ParOpen,
")" => Token::ParClose,
"^" => Token::new_infix_op(s, false, 7),
":" => Token::new_infix_op(s, true, 6),
"*" | "/" | "%" => Token::new_infix_op(s, true, 5),
"+" | "-" => Token::new_infix_op(s, true, 4),
"=" | "!=" | "<" | ">" | "<=" | ">=" => Token::new_infix_op(s, true, 3),
"&" => Token::new_infix_op(s, true, 2),
"|" => Token::new_infix_op(s, true, 1),
"match" | "index" => Token::PrefixOp {
arity: 2,
value: s.clone(),
},
"substr" => Token::PrefixOp {
arity: 3,
value: s.clone(),
},
"length" => Token::PrefixOp {
arity: 1,
value: s.clone(),
},
_ => Token::new_value(s),
};
push_token_if_not_escaped(&mut tokens_acc, tok_idx, token_if_not_escaped, s);
tok_idx += 1;
}
maybe_dump_tokens_acc(&tokens_acc);
Ok(tokens_acc)
}
fn maybe_dump_tokens_acc(tokens_acc: &[(usize, Token)]) {
use std::env;
| if debug_var == "1" {
println!("EXPR_DEBUG_TOKENS");
for token in tokens_acc {
println!("\t{:?}", token);
}
}
}
}
fn push_token_if_not_escaped(acc: &mut Vec<(usize, Token)>, tok_idx: usize, token: Token, s: &str) {
// Smells heuristics... :(
let prev_is_plus = match acc.last() {
None => false,
Some(t) => t.1.is_infix_plus(),
};
let should_use_as_escaped = if prev_is_plus && acc.len() >= 2 {
let pre_prev = &acc[acc.len() - 2];
!(pre_prev.1.is_a_number() || pre_prev.1.is_a_close_paren())
} else {
prev_is_plus
};
if should_use_as_escaped {
acc.pop();
acc.push((tok_idx, Token::new_value(s)));
} else {
acc.push((tok_idx, token));
}
} | if let Ok(debug_var) = env::var("EXPR_DEBUG_TOKENS") { | random_line_split |
rscope.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::cell::Cell;
use syntax::ast;
use syntax::codemap::Span;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this reference has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
fn anon_regions(&self,
span: Span,
count: uint)
-> Result<Vec<ty::Region>, ()>;
}
// A scope in which all regions must be explicitly named
pub struct ExplicitRscope;
impl RegionScope for ExplicitRscope {
fn anon_regions(&self,
_span: Span,
_count: uint)
-> Result<Vec<ty::Region>, ()> {
Err(())
}
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
binder_id: ast::NodeId,
anon_bindings: Cell<uint>,
}
impl BindingRscope {
pub fn new(binder_id: ast::NodeId) -> BindingRscope {
BindingRscope {
binder_id: binder_id,
anon_bindings: Cell::new(0),
}
}
}
impl RegionScope for BindingRscope {
fn anon_regions(&self,
_: Span,
count: uint)
-> Result<Vec<ty::Region>, ()> {
let idx = self.anon_bindings.get();
self.anon_bindings.set(idx + count);
Ok(Vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i))))
}
}
/// A scope in which we generate one specific region. This occurs after the
/// `->` (i.e. in the return type) of function signatures.
pub struct ImpliedSingleRscope {
pub region: ty::Region,
}
impl RegionScope for ImpliedSingleRscope {
fn | (&self, _: Span, count: uint)
-> Result<Vec<ty::Region>,()> {
Ok(Vec::from_elem(count, self.region.clone()))
}
}
| anon_regions | identifier_name |
rscope.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::cell::Cell;
use syntax::ast;
use syntax::codemap::Span;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this reference has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
fn anon_regions(&self,
span: Span,
count: uint)
-> Result<Vec<ty::Region>, ()>;
}
// A scope in which all regions must be explicitly named
pub struct ExplicitRscope;
impl RegionScope for ExplicitRscope {
fn anon_regions(&self,
_span: Span,
_count: uint)
-> Result<Vec<ty::Region>, ()> {
Err(())
}
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
binder_id: ast::NodeId,
anon_bindings: Cell<uint>,
}
impl BindingRscope {
pub fn new(binder_id: ast::NodeId) -> BindingRscope {
BindingRscope {
binder_id: binder_id,
anon_bindings: Cell::new(0),
}
}
}
impl RegionScope for BindingRscope {
fn anon_regions(&self,
_: Span,
count: uint)
-> Result<Vec<ty::Region>, ()> {
let idx = self.anon_bindings.get();
self.anon_bindings.set(idx + count);
Ok(Vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i))))
}
}
/// A scope in which we generate one specific region. This occurs after the
/// `->` (i.e. in the return type) of function signatures.
pub struct ImpliedSingleRscope {
pub region: ty::Region,
}
impl RegionScope for ImpliedSingleRscope {
fn anon_regions(&self, _: Span, count: uint)
-> Result<Vec<ty::Region>,()> {
Ok(Vec::from_elem(count, self.region.clone()))
} | } | random_line_split |
|
borrowck-borrow-overloaded-auto-deref-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test how overloaded deref interacts with borrows when DerefMut
// is implemented.
use std::ops::{Deref, DerefMut};
struct Own<T> {
value: *mut T
}
impl<T> Deref<T> for Own<T> {
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.value }
}
}
impl<T> DerefMut<T> for Own<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.value }
}
}
struct Point {
x: int,
y: int
}
impl Point {
fn get(&self) -> (int, int) {
(self.x, self.y)
}
fn set(&mut self, x: int, y: int) {
self.x = x;
self.y = y;
}
fn x_ref<'a>(&'a self) -> &'a int {
&self.x
}
fn y_mut<'a>(&'a mut self) -> &'a mut int |
}
fn deref_imm_field(x: Own<Point>) {
let _i = &x.y;
}
fn deref_mut_field1(x: Own<Point>) {
let _i = &mut x.y; //~ ERROR cannot borrow
}
fn deref_mut_field2(mut x: Own<Point>) {
let _i = &mut x.y;
}
fn deref_extend_field<'a>(x: &'a Own<Point>) -> &'a int {
&x.y
}
fn deref_extend_mut_field1<'a>(x: &'a Own<Point>) -> &'a mut int {
&mut x.y //~ ERROR cannot borrow
}
fn deref_extend_mut_field2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
&mut x.y
}
fn deref_extend_mut_field3<'a>(x: &'a mut Own<Point>) {
// Hmm, this is unfortunate, because with ~ it would work,
// but it's presently the expected outcome. See `deref_extend_mut_field4`
// for the workaround.
let _x = &mut x.x;
let _y = &mut x.y; //~ ERROR cannot borrow
}
fn deref_extend_mut_field4<'a>(x: &'a mut Own<Point>) {
let p = &mut **x;
let _x = &mut p.x;
let _y = &mut p.y;
}
fn assign_field1<'a>(x: Own<Point>) {
x.y = 3; //~ ERROR cannot borrow
}
fn assign_field2<'a>(x: &'a Own<Point>) {
x.y = 3; //~ ERROR cannot assign
}
fn assign_field3<'a>(x: &'a mut Own<Point>) {
x.y = 3;
}
fn assign_field4<'a>(x: &'a mut Own<Point>) {
let _p: &mut Point = &mut **x;
x.y = 3; //~ ERROR cannot borrow
}
// FIXME(eddyb) #12825 This shouldn't attempt to call deref_mut.
/*
fn deref_imm_method(x: Own<Point>) {
let _i = x.get();
}
*/
fn deref_mut_method1(x: Own<Point>) {
x.set(0, 0); //~ ERROR cannot borrow
}
fn deref_mut_method2(mut x: Own<Point>) {
x.set(0, 0);
}
fn deref_extend_method<'a>(x: &'a Own<Point>) -> &'a int {
x.x_ref()
}
fn deref_extend_mut_method1<'a>(x: &'a Own<Point>) -> &'a mut int {
x.y_mut() //~ ERROR cannot borrow
}
fn deref_extend_mut_method2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
x.y_mut()
}
fn assign_method1<'a>(x: Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method2<'a>(x: &'a Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method3<'a>(x: &'a mut Own<Point>) {
*x.y_mut() = 3;
}
pub fn main() {}
| {
&mut self.y
} | identifier_body |
borrowck-borrow-overloaded-auto-deref-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test how overloaded deref interacts with borrows when DerefMut
// is implemented.
use std::ops::{Deref, DerefMut};
struct Own<T> {
value: *mut T
}
impl<T> Deref<T> for Own<T> {
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.value }
}
}
impl<T> DerefMut<T> for Own<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.value }
}
}
struct Point {
x: int,
y: int
}
impl Point {
fn get(&self) -> (int, int) {
(self.x, self.y)
}
fn set(&mut self, x: int, y: int) {
self.x = x;
self.y = y;
}
fn x_ref<'a>(&'a self) -> &'a int {
&self.x
}
fn y_mut<'a>(&'a mut self) -> &'a mut int {
&mut self.y
}
}
fn deref_imm_field(x: Own<Point>) {
let _i = &x.y;
}
fn deref_mut_field1(x: Own<Point>) {
let _i = &mut x.y; //~ ERROR cannot borrow
}
fn deref_mut_field2(mut x: Own<Point>) {
let _i = &mut x.y;
}
fn deref_extend_field<'a>(x: &'a Own<Point>) -> &'a int {
&x.y
}
fn deref_extend_mut_field1<'a>(x: &'a Own<Point>) -> &'a mut int {
&mut x.y //~ ERROR cannot borrow
}
fn deref_extend_mut_field2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
&mut x.y
}
fn deref_extend_mut_field3<'a>(x: &'a mut Own<Point>) {
// Hmm, this is unfortunate, because with ~ it would work,
// but it's presently the expected outcome. See `deref_extend_mut_field4`
// for the workaround.
let _x = &mut x.x;
let _y = &mut x.y; //~ ERROR cannot borrow
}
fn deref_extend_mut_field4<'a>(x: &'a mut Own<Point>) {
let p = &mut **x;
let _x = &mut p.x;
let _y = &mut p.y;
}
fn assign_field1<'a>(x: Own<Point>) {
x.y = 3; //~ ERROR cannot borrow
}
fn assign_field2<'a>(x: &'a Own<Point>) {
x.y = 3; //~ ERROR cannot assign
}
fn | <'a>(x: &'a mut Own<Point>) {
x.y = 3;
}
fn assign_field4<'a>(x: &'a mut Own<Point>) {
let _p: &mut Point = &mut **x;
x.y = 3; //~ ERROR cannot borrow
}
// FIXME(eddyb) #12825 This shouldn't attempt to call deref_mut.
/*
fn deref_imm_method(x: Own<Point>) {
let _i = x.get();
}
*/
fn deref_mut_method1(x: Own<Point>) {
x.set(0, 0); //~ ERROR cannot borrow
}
fn deref_mut_method2(mut x: Own<Point>) {
x.set(0, 0);
}
fn deref_extend_method<'a>(x: &'a Own<Point>) -> &'a int {
x.x_ref()
}
fn deref_extend_mut_method1<'a>(x: &'a Own<Point>) -> &'a mut int {
x.y_mut() //~ ERROR cannot borrow
}
fn deref_extend_mut_method2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
x.y_mut()
}
fn assign_method1<'a>(x: Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method2<'a>(x: &'a Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method3<'a>(x: &'a mut Own<Point>) {
*x.y_mut() = 3;
}
pub fn main() {}
| assign_field3 | identifier_name |
borrowck-borrow-overloaded-auto-deref-mut.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test how overloaded deref interacts with borrows when DerefMut
// is implemented.
use std::ops::{Deref, DerefMut};
struct Own<T> {
value: *mut T
}
impl<T> Deref<T> for Own<T> {
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.value }
}
}
impl<T> DerefMut<T> for Own<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.value }
}
}
struct Point {
x: int,
y: int
}
impl Point {
fn get(&self) -> (int, int) {
(self.x, self.y)
}
fn set(&mut self, x: int, y: int) {
self.x = x;
self.y = y;
}
fn x_ref<'a>(&'a self) -> &'a int {
&self.x
}
fn y_mut<'a>(&'a mut self) -> &'a mut int {
&mut self.y
}
}
fn deref_imm_field(x: Own<Point>) {
let _i = &x.y;
}
fn deref_mut_field1(x: Own<Point>) {
let _i = &mut x.y; //~ ERROR cannot borrow
}
fn deref_mut_field2(mut x: Own<Point>) {
let _i = &mut x.y;
}
fn deref_extend_field<'a>(x: &'a Own<Point>) -> &'a int {
&x.y
}
fn deref_extend_mut_field1<'a>(x: &'a Own<Point>) -> &'a mut int {
&mut x.y //~ ERROR cannot borrow
}
fn deref_extend_mut_field2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
&mut x.y
}
fn deref_extend_mut_field3<'a>(x: &'a mut Own<Point>) {
// Hmm, this is unfortunate, because with ~ it would work,
// but it's presently the expected outcome. See `deref_extend_mut_field4`
// for the workaround.
let _x = &mut x.x;
let _y = &mut x.y; //~ ERROR cannot borrow
}
fn deref_extend_mut_field4<'a>(x: &'a mut Own<Point>) {
let p = &mut **x;
let _x = &mut p.x;
let _y = &mut p.y;
}
fn assign_field1<'a>(x: Own<Point>) {
x.y = 3; //~ ERROR cannot borrow
}
fn assign_field2<'a>(x: &'a Own<Point>) {
x.y = 3; //~ ERROR cannot assign
}
fn assign_field3<'a>(x: &'a mut Own<Point>) {
x.y = 3;
}
fn assign_field4<'a>(x: &'a mut Own<Point>) {
let _p: &mut Point = &mut **x;
x.y = 3; //~ ERROR cannot borrow
}
// FIXME(eddyb) #12825 This shouldn't attempt to call deref_mut.
/*
fn deref_imm_method(x: Own<Point>) {
let _i = x.get();
}
*/
fn deref_mut_method1(x: Own<Point>) {
x.set(0, 0); //~ ERROR cannot borrow
}
fn deref_mut_method2(mut x: Own<Point>) {
x.set(0, 0);
}
fn deref_extend_method<'a>(x: &'a Own<Point>) -> &'a int {
x.x_ref()
}
fn deref_extend_mut_method1<'a>(x: &'a Own<Point>) -> &'a mut int {
x.y_mut() //~ ERROR cannot borrow
} | fn assign_method1<'a>(x: Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method2<'a>(x: &'a Own<Point>) {
*x.y_mut() = 3; //~ ERROR cannot borrow
}
fn assign_method3<'a>(x: &'a mut Own<Point>) {
*x.y_mut() = 3;
}
pub fn main() {} |
fn deref_extend_mut_method2<'a>(x: &'a mut Own<Point>) -> &'a mut int {
x.y_mut()
}
| random_line_split |
get.rs | use super::with_path;
use util::*;
use hyper::client::Response;
use hyper::status::StatusCode;
fn with_query<F>(query: &str, f: F) where F: FnOnce(&mut Response) {
with_path(&format!("/get?{}", query), f)
}
fn assert_accepted(query: &str) {
with_query(query, |res| {
let s = read_body_to_string(res);
assert_eq!(res.status, StatusCode::Ok);
assert_eq!(s, "Congratulations on conforming!");
})
}
fn assert_rejected(query: &str) {
with_query(query, |res| {
assert_eq!(res.status, StatusCode::BadRequest)
})
}
mod accepts {
use super::assert_accepted;
#[test]
fn valid() {
assert_accepted("state=valid")
}
#[test]
fn | () {
assert_accepted("state=valid&state=foo")
}
}
mod rejects {
use super::assert_rejected;
#[test]
fn invalid() {
assert_rejected("state=foo")
}
#[test]
fn other_keys() {
assert_rejected("valid=valid")
}
#[test]
fn empty() {
assert_rejected("")
}
#[test]
fn second_valid() {
assert_rejected("state=foo&state=valid")
}
}
| first_valid | identifier_name |
get.rs | use super::with_path;
use util::*;
use hyper::client::Response;
use hyper::status::StatusCode;
fn with_query<F>(query: &str, f: F) where F: FnOnce(&mut Response) {
with_path(&format!("/get?{}", query), f)
}
fn assert_accepted(query: &str) {
with_query(query, |res| {
let s = read_body_to_string(res);
assert_eq!(res.status, StatusCode::Ok);
assert_eq!(s, "Congratulations on conforming!");
})
}
fn assert_rejected(query: &str) {
with_query(query, |res| {
assert_eq!(res.status, StatusCode::BadRequest)
})
}
mod accepts {
use super::assert_accepted;
#[test]
fn valid() {
assert_accepted("state=valid")
}
#[test]
fn first_valid() |
}
mod rejects {
use super::assert_rejected;
#[test]
fn invalid() {
assert_rejected("state=foo")
}
#[test]
fn other_keys() {
assert_rejected("valid=valid")
}
#[test]
fn empty() {
assert_rejected("")
}
#[test]
fn second_valid() {
assert_rejected("state=foo&state=valid")
}
}
| {
assert_accepted("state=valid&state=foo")
} | identifier_body |
get.rs | use super::with_path;
use util::*;
use hyper::client::Response;
use hyper::status::StatusCode;
fn with_query<F>(query: &str, f: F) where F: FnOnce(&mut Response) {
with_path(&format!("/get?{}", query), f)
}
fn assert_accepted(query: &str) {
with_query(query, |res| {
let s = read_body_to_string(res);
assert_eq!(res.status, StatusCode::Ok);
assert_eq!(s, "Congratulations on conforming!");
})
}
fn assert_rejected(query: &str) {
with_query(query, |res| {
assert_eq!(res.status, StatusCode::BadRequest)
})
}
mod accepts {
use super::assert_accepted;
#[test]
fn valid() {
assert_accepted("state=valid")
}
#[test]
fn first_valid() {
assert_accepted("state=valid&state=foo")
}
}
mod rejects { | #[test]
fn invalid() {
assert_rejected("state=foo")
}
#[test]
fn other_keys() {
assert_rejected("valid=valid")
}
#[test]
fn empty() {
assert_rejected("")
}
#[test]
fn second_valid() {
assert_rejected("state=foo&state=valid")
}
} | use super::assert_rejected;
| random_line_split |
record_type.rs | use crate::library;
#[derive(PartialEq, Eq)]
pub enum | {
/// Boxed record that uses g_boxed_copy, g_boxed_free.
/// Must have glib_get_type function
AutoBoxed,
/// Boxed record with custom copy/free functions
Boxed,
/// Reference-counted record
Refcounted,
//TODO: detect and generate direct records
//Direct,
}
impl RecordType {
pub fn of(record: &library::Record) -> RecordType {
let mut has_copy = false;
let mut has_free = false;
let mut has_ref = false;
let mut has_unref = false;
let mut has_destroy = false;
for func in &record.functions {
match &func.name[..] {
"copy" => has_copy = true,
"free" => has_free = true,
"destroy" => has_destroy = true,
"ref" => has_ref = true,
"unref" => has_unref = true,
_ => (),
}
}
if has_destroy && has_copy {
has_free = true;
}
if has_ref && has_unref {
RecordType::Refcounted
} else if has_copy && has_free {
RecordType::Boxed
} else {
RecordType::AutoBoxed
}
}
}
| RecordType | identifier_name |
record_type.rs | use crate::library;
#[derive(PartialEq, Eq)]
pub enum RecordType {
/// Boxed record that uses g_boxed_copy, g_boxed_free.
/// Must have glib_get_type function
AutoBoxed,
/// Boxed record with custom copy/free functions
Boxed,
/// Reference-counted record
Refcounted,
//TODO: detect and generate direct records
//Direct,
}
impl RecordType {
pub fn of(record: &library::Record) -> RecordType |
if has_ref && has_unref {
RecordType::Refcounted
} else if has_copy && has_free {
RecordType::Boxed
} else {
RecordType::AutoBoxed
}
}
}
| {
let mut has_copy = false;
let mut has_free = false;
let mut has_ref = false;
let mut has_unref = false;
let mut has_destroy = false;
for func in &record.functions {
match &func.name[..] {
"copy" => has_copy = true,
"free" => has_free = true,
"destroy" => has_destroy = true,
"ref" => has_ref = true,
"unref" => has_unref = true,
_ => (),
}
}
if has_destroy && has_copy {
has_free = true;
} | identifier_body |
record_type.rs | use crate::library;
#[derive(PartialEq, Eq)]
pub enum RecordType {
/// Boxed record that uses g_boxed_copy, g_boxed_free.
/// Must have glib_get_type function
AutoBoxed,
/// Boxed record with custom copy/free functions
Boxed,
/// Reference-counted record
Refcounted,
//TODO: detect and generate direct records
//Direct,
}
impl RecordType {
pub fn of(record: &library::Record) -> RecordType {
let mut has_copy = false;
let mut has_free = false;
let mut has_ref = false;
let mut has_unref = false;
let mut has_destroy = false;
for func in &record.functions {
match &func.name[..] {
"copy" => has_copy = true,
"free" => has_free = true,
"destroy" => has_destroy = true,
"ref" => has_ref = true,
"unref" => has_unref = true,
_ => (),
}
}
if has_destroy && has_copy {
has_free = true;
}
if has_ref && has_unref {
RecordType::Refcounted
} else if has_copy && has_free {
RecordType::Boxed | } | } else {
RecordType::AutoBoxed
}
} | random_line_split |
lib.rs | #![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
//!Load Cifar10
//!
//!Cifar10 Simple Loader
//!
//!Use image crate in CifarImage.
//!
//!##Examples
//!
//! Download CIFAR-10 binary version and extract.
//!
//!```
//!# extern crate cifar_10_loader;
//!# use cifar_10_loader::CifarDataset;
//!# fn main()
//!# {
//!//This path is directory of cifar-10-batches-bin.
//!//It's extracted from CIFAR-10 binary version.
//!let cifar10_path = "./cifar-10-batches-bin/";
//!let cifar_dataset = CifarDataset::new(cifar10_path).unwrap();
//! # }
//!```
//#![deny(missing_docs)]
pub use self::image_pub::CifarImage;
pub use self::dataset::CifarDataset;
use self::image_private::CifarImageTrait;
mod image_private;
mod image_pub;
mod dataset;
#[cfg(test)] | mod test; | random_line_split |
|
complex_query.rs | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use rustorm::query::Query;
use rustorm::query::{Filter,Equality};
use rustorm::dao::{Dao,IsDao};
use gen::bazaar::Product;
use gen::bazaar::product;
use gen::bazaar::Photo;
use gen::bazaar::photo;
use gen::bazaar::Review;
use gen::bazaar::review;
use gen::bazaar::Category;
use gen::bazaar::category;
use gen::bazaar::product_category;
use gen::bazaar::ProductCategory;
use gen::bazaar::product_photo;
use gen::bazaar::ProductPhoto;
use gen::bazaar::ProductAvailability;
use gen::bazaar::product_availability;
use gen::bazaar;
use rustorm::table::IsTable;
use rustorm::pool::ManagedPool;
use rustorm::query::HasEquality;
use rustorm::query::QueryBuilder;
use rustorm::query::ToTableName;
use rustorm::query::function::COUNT;
use rustorm::query::order::HasDirection;
use rustorm::query::join::ToJoin;
use rustorm::query::operand::ToOperand;
use rustorm::query::builder::SELECT_ALL;
mod gen;
fn main(){
let mut pool = ManagedPool::init("postgres://postgres:p0stgr3s@localhost/bazaar_v8",1).unwrap();
let db = pool.connect().unwrap();
let frag = SELECT_ALL().FROM(&bazaar::product)
.LEFT_JOIN(bazaar::product_category
.ON(
product_category::product_id.EQ(&product::product_id)
.AND(product_category::product_id.EQ(&product::product_id))
)
)
.LEFT_JOIN(bazaar::category
.ON(category::category_id.EQ(&product_category::category_id)))
.LEFT_JOIN(bazaar::product_photo
.ON(product::product_id.EQ(&product_photo::product_id)))
.LEFT_JOIN(bazaar::photo
.ON(product_photo::photo_id.EQ(&photo::photo_id)))
.WHERE(
product::name.EQ(&"GTX660 Ti videocard".to_owned())
.AND(category::name.EQ(&"Electronic".to_owned())) | )
.GROUP_BY(&[category::name])
.HAVING(COUNT(&"*").GT(&1))
.HAVING(COUNT(&product::product_id).GT(&1))
.ORDER_BY(&[product::name.ASC(), product::created.DESC()])
.build(db.as_ref());
let expected = "
SELECT *
FROM bazaar.product
LEFT JOIN bazaar.product_category
ON ( product_category.product_id = product.product_id AND product_category.product_id = product.product_id )
LEFT JOIN bazaar.category
ON category.category_id = product_category.category_id
LEFT JOIN bazaar.product_photo
ON product.product_id = product_photo.product_id
LEFT JOIN bazaar.photo
ON product_photo.photo_id = photo.photo_id
WHERE ( product.name = $1 AND category.name = $2 )
GROUP BY category.name
HAVING COUNT(*) > $3 , COUNT(product.product_id) > $4
ORDER BY product.name ASC, product.created DESC
".to_string();
println!("actual: {{{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
} | random_line_split |
|
complex_query.rs | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use rustorm::query::Query;
use rustorm::query::{Filter,Equality};
use rustorm::dao::{Dao,IsDao};
use gen::bazaar::Product;
use gen::bazaar::product;
use gen::bazaar::Photo;
use gen::bazaar::photo;
use gen::bazaar::Review;
use gen::bazaar::review;
use gen::bazaar::Category;
use gen::bazaar::category;
use gen::bazaar::product_category;
use gen::bazaar::ProductCategory;
use gen::bazaar::product_photo;
use gen::bazaar::ProductPhoto;
use gen::bazaar::ProductAvailability;
use gen::bazaar::product_availability;
use gen::bazaar;
use rustorm::table::IsTable;
use rustorm::pool::ManagedPool;
use rustorm::query::HasEquality;
use rustorm::query::QueryBuilder;
use rustorm::query::ToTableName;
use rustorm::query::function::COUNT;
use rustorm::query::order::HasDirection;
use rustorm::query::join::ToJoin;
use rustorm::query::operand::ToOperand;
use rustorm::query::builder::SELECT_ALL;
mod gen;
fn main() | )
.GROUP_BY(&[category::name])
.HAVING(COUNT(&"*").GT(&1))
.HAVING(COUNT(&product::product_id).GT(&1))
.ORDER_BY(&[product::name.ASC(), product::created.DESC()])
.build(db.as_ref());
let expected = "
SELECT *
FROM bazaar.product
LEFT JOIN bazaar.product_category
ON ( product_category.product_id = product.product_id AND product_category.product_id = product.product_id )
LEFT JOIN bazaar.category
ON category.category_id = product_category.category_id
LEFT JOIN bazaar.product_photo
ON product.product_id = product_photo.product_id
LEFT JOIN bazaar.photo
ON product_photo.photo_id = photo.photo_id
WHERE ( product.name = $1 AND category.name = $2 )
GROUP BY category.name
HAVING COUNT(*) > $3 , COUNT(product.product_id) > $4
ORDER BY product.name ASC, product.created DESC
".to_string();
println!("actual: {{{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
}
| {
let mut pool = ManagedPool::init("postgres://postgres:p0stgr3s@localhost/bazaar_v8",1).unwrap();
let db = pool.connect().unwrap();
let frag = SELECT_ALL().FROM(&bazaar::product)
.LEFT_JOIN(bazaar::product_category
.ON(
product_category::product_id.EQ(&product::product_id)
.AND(product_category::product_id.EQ(&product::product_id))
)
)
.LEFT_JOIN(bazaar::category
.ON(category::category_id.EQ(&product_category::category_id)))
.LEFT_JOIN(bazaar::product_photo
.ON(product::product_id.EQ(&product_photo::product_id)))
.LEFT_JOIN(bazaar::photo
.ON(product_photo::photo_id.EQ(&photo::photo_id)))
.WHERE(
product::name.EQ(&"GTX660 Ti videocard".to_owned())
.AND(category::name.EQ(&"Electronic".to_owned())) | identifier_body |
complex_query.rs | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use rustorm::query::Query;
use rustorm::query::{Filter,Equality};
use rustorm::dao::{Dao,IsDao};
use gen::bazaar::Product;
use gen::bazaar::product;
use gen::bazaar::Photo;
use gen::bazaar::photo;
use gen::bazaar::Review;
use gen::bazaar::review;
use gen::bazaar::Category;
use gen::bazaar::category;
use gen::bazaar::product_category;
use gen::bazaar::ProductCategory;
use gen::bazaar::product_photo;
use gen::bazaar::ProductPhoto;
use gen::bazaar::ProductAvailability;
use gen::bazaar::product_availability;
use gen::bazaar;
use rustorm::table::IsTable;
use rustorm::pool::ManagedPool;
use rustorm::query::HasEquality;
use rustorm::query::QueryBuilder;
use rustorm::query::ToTableName;
use rustorm::query::function::COUNT;
use rustorm::query::order::HasDirection;
use rustorm::query::join::ToJoin;
use rustorm::query::operand::ToOperand;
use rustorm::query::builder::SELECT_ALL;
mod gen;
fn | (){
let mut pool = ManagedPool::init("postgres://postgres:p0stgr3s@localhost/bazaar_v8",1).unwrap();
let db = pool.connect().unwrap();
let frag = SELECT_ALL().FROM(&bazaar::product)
.LEFT_JOIN(bazaar::product_category
.ON(
product_category::product_id.EQ(&product::product_id)
.AND(product_category::product_id.EQ(&product::product_id))
)
)
.LEFT_JOIN(bazaar::category
.ON(category::category_id.EQ(&product_category::category_id)))
.LEFT_JOIN(bazaar::product_photo
.ON(product::product_id.EQ(&product_photo::product_id)))
.LEFT_JOIN(bazaar::photo
.ON(product_photo::photo_id.EQ(&photo::photo_id)))
.WHERE(
product::name.EQ(&"GTX660 Ti videocard".to_owned())
.AND(category::name.EQ(&"Electronic".to_owned()))
)
.GROUP_BY(&[category::name])
.HAVING(COUNT(&"*").GT(&1))
.HAVING(COUNT(&product::product_id).GT(&1))
.ORDER_BY(&[product::name.ASC(), product::created.DESC()])
.build(db.as_ref());
let expected = "
SELECT *
FROM bazaar.product
LEFT JOIN bazaar.product_category
ON ( product_category.product_id = product.product_id AND product_category.product_id = product.product_id )
LEFT JOIN bazaar.category
ON category.category_id = product_category.category_id
LEFT JOIN bazaar.product_photo
ON product.product_id = product_photo.product_id
LEFT JOIN bazaar.photo
ON product_photo.photo_id = photo.photo_id
WHERE ( product.name = $1 AND category.name = $2 )
GROUP BY category.name
HAVING COUNT(*) > $3 , COUNT(product.product_id) > $4
ORDER BY product.name ASC, product.created DESC
".to_string();
println!("actual: {{{}}} [{}]", frag.sql, frag.sql.len());
println!("expected: {{{}}} [{}]", expected, expected.len());
assert!(frag.sql.trim() == expected.trim());
}
| main | identifier_name |
atomic_usize.rs | use std::cell::UnsafeCell;
use std::fmt;
use std::ops;
/// `AtomicUsize` providing an additional `unsync_load` function.
pub(crate) struct AtomicUsize {
inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}
impl AtomicUsize {
pub(crate) const fn new(val: usize) -> AtomicUsize {
let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
AtomicUsize { inner }
}
/// Performs an unsynchronized load.
///
/// # Safety
///
/// All mutations must have happened before the unsynchronized load.
/// Additionally, there must be no concurrent mutations.
pub(crate) unsafe fn unsync_load(&self) -> usize {
*(*self.inner.get()).get_mut() |
pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R {
// safety: we have mutable access
f(unsafe { (*self.inner.get()).get_mut() })
}
}
impl ops::Deref for AtomicUsize {
type Target = std::sync::atomic::AtomicUsize;
fn deref(&self) -> &Self::Target {
// safety: it is always safe to access `&self` fns on the inner value as
// we never perform unsafe mutations.
unsafe { &*self.inner.get() }
}
}
impl ops::DerefMut for AtomicUsize {
fn deref_mut(&mut self) -> &mut Self::Target {
// safety: we hold `&mut self`
unsafe { &mut *self.inner.get() }
}
}
impl fmt::Debug for AtomicUsize {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(fmt)
}
} | } | random_line_split |
atomic_usize.rs | use std::cell::UnsafeCell;
use std::fmt;
use std::ops;
/// `AtomicUsize` providing an additional `unsync_load` function.
pub(crate) struct AtomicUsize {
inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}
impl AtomicUsize {
pub(crate) const fn new(val: usize) -> AtomicUsize {
let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
AtomicUsize { inner }
}
/// Performs an unsynchronized load.
///
/// # Safety
///
/// All mutations must have happened before the unsynchronized load.
/// Additionally, there must be no concurrent mutations.
pub(crate) unsafe fn unsync_load(&self) -> usize |
pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R {
// safety: we have mutable access
f(unsafe { (*self.inner.get()).get_mut() })
}
}
impl ops::Deref for AtomicUsize {
type Target = std::sync::atomic::AtomicUsize;
fn deref(&self) -> &Self::Target {
// safety: it is always safe to access `&self` fns on the inner value as
// we never perform unsafe mutations.
unsafe { &*self.inner.get() }
}
}
impl ops::DerefMut for AtomicUsize {
fn deref_mut(&mut self) -> &mut Self::Target {
// safety: we hold `&mut self`
unsafe { &mut *self.inner.get() }
}
}
impl fmt::Debug for AtomicUsize {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(fmt)
}
}
| {
*(*self.inner.get()).get_mut()
} | identifier_body |
atomic_usize.rs | use std::cell::UnsafeCell;
use std::fmt;
use std::ops;
/// `AtomicUsize` providing an additional `unsync_load` function.
pub(crate) struct AtomicUsize {
inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}
impl AtomicUsize {
pub(crate) const fn new(val: usize) -> AtomicUsize {
let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
AtomicUsize { inner }
}
/// Performs an unsynchronized load.
///
/// # Safety
///
/// All mutations must have happened before the unsynchronized load.
/// Additionally, there must be no concurrent mutations.
pub(crate) unsafe fn unsync_load(&self) -> usize {
*(*self.inner.get()).get_mut()
}
pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R {
// safety: we have mutable access
f(unsafe { (*self.inner.get()).get_mut() })
}
}
impl ops::Deref for AtomicUsize {
type Target = std::sync::atomic::AtomicUsize;
fn deref(&self) -> &Self::Target {
// safety: it is always safe to access `&self` fns on the inner value as
// we never perform unsafe mutations.
unsafe { &*self.inner.get() }
}
}
impl ops::DerefMut for AtomicUsize {
fn | (&mut self) -> &mut Self::Target {
// safety: we hold `&mut self`
unsafe { &mut *self.inner.get() }
}
}
impl fmt::Debug for AtomicUsize {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(fmt)
}
}
| deref_mut | identifier_name |
error.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
#[cfg(feature = "js_backtrace")]
use backtrace::Backtrace;
#[cfg(feature = "js_backtrace")]
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods;
use dom::bindings::codegen::PrototypeList::proto_id_to_name;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible};
use dom::bindings::conversions::root_from_object;
use dom::bindings::str::USVString;
use dom::domexception::{DOMErrorName, DOMException};
use dom::globalscope::GlobalScope;
use js::error::{throw_range_error, throw_type_error};
use js::jsapi::JSContext;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_IsExceptionPending;
use js::jsval::UndefinedValue;
use js::rust::HandleObject;
use js::rust::MutableHandleValue;
use js::rust::wrappers::JS_ErrorFromException;
use js::rust::wrappers::JS_GetPendingException;
use js::rust::wrappers::JS_SetPendingException;
use libc::c_uint;
use std::slice::from_raw_parts;
/// An optional stringified JS backtrace and stringified native backtrace from
/// the last DOM exception that was reported.
#[cfg(feature = "js_backtrace")]
thread_local!(static LAST_EXCEPTION_BACKTRACE: DomRefCell<Option<(Option<String>, String)>> = DomRefCell::new(None));
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// InvalidModificationError DOMException
InvalidModification,
/// NotReadableError DOMException
NotReadable,
/// TypeError JavaScript Error
Type(String),
/// RangeError JavaScript Error
Range(String),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub unsafe fn | (cx: *mut JSContext, global: &GlobalScope, result: Error) {
#[cfg(feature = "js_backtrace")]
{
capture_stack!(in(cx) let stack);
let js_stack = stack.and_then(|s| s.as_string(None));
let rust_stack = Backtrace::new();
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
*backtrace.borrow_mut() = Some((js_stack, format!("{:?}", rust_stack)));
});
}
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::InvalidModification => DOMErrorName::InvalidModificationError,
Error::NotReadable => DOMErrorName::NotReadableError,
Error::Type(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(JS_IsExceptionPending(cx));
return;
},
};
assert!(!JS_IsExceptionPending(cx));
let exception = DOMException::new(global, code);
rooted!(in(cx) let mut thrown = UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
JS_SetPendingException(cx, thrown.handle());
}
/// A struct encapsulating information about a runtime script error.
pub struct ErrorInfo {
/// The error message.
pub message: String,
/// The file name.
pub filename: String,
/// The line number.
pub lineno: c_uint,
/// The column number.
pub column: c_uint,
}
impl ErrorInfo {
unsafe fn from_native_error(cx: *mut JSContext, object: HandleObject) -> Option<ErrorInfo> {
let report = JS_ErrorFromException(cx, object);
if report.is_null() {
return None;
}
let filename = {
let filename = (*report)._base.filename as *const u8;
if !filename.is_null() {
let length = (0..).find(|idx| *filename.offset(*idx) == 0).unwrap();
let filename = from_raw_parts(filename, length as usize);
String::from_utf8_lossy(filename).into_owned()
} else {
"none".to_string()
}
};
let lineno = (*report)._base.lineno;
let column = (*report)._base.column;
let message = {
let message = (*report)._base.message_.data_ as *const u8;
let length = (0..).find(|idx| *message.offset(*idx) == 0).unwrap();
let message = from_raw_parts(message, length as usize);
String::from_utf8_lossy(message).into_owned()
};
Some(ErrorInfo {
filename: filename,
message: message,
lineno: lineno,
column: column,
})
}
fn from_dom_exception(object: HandleObject) -> Option<ErrorInfo> {
let exception = match root_from_object::<DOMException>(object.get()) {
Ok(exception) => exception,
Err(_) => return None,
};
Some(ErrorInfo {
filename: "".to_string(),
message: exception.Stringifier().into(),
lineno: 0,
column: 0,
})
}
}
/// Report a pending exception, thereby clearing it.
///
/// The `dispatch_event` argument is temporary and non-standard; passing false
/// prevents dispatching the `error` event.
pub unsafe fn report_pending_exception(cx: *mut JSContext, dispatch_event: bool) {
if !JS_IsExceptionPending(cx) {
return;
}
rooted!(in(cx) let mut value = UndefinedValue());
if !JS_GetPendingException(cx, value.handle_mut()) {
JS_ClearPendingException(cx);
error!("Uncaught exception: JS_GetPendingException failed");
return;
}
JS_ClearPendingException(cx);
let error_info = if value.is_object() {
rooted!(in(cx) let object = value.to_object());
ErrorInfo::from_native_error(cx, object.handle())
.or_else(|| ErrorInfo::from_dom_exception(object.handle()))
.unwrap_or_else(|| ErrorInfo {
message: format!("uncaught exception: unknown (can't convert to string)"),
filename: String::new(),
lineno: 0,
column: 0,
})
} else {
match USVString::from_jsval(cx, value.handle(), ()) {
Ok(ConversionResult::Success(USVString(string))) => ErrorInfo {
message: format!("uncaught exception: {}", string),
filename: String::new(),
lineno: 0,
column: 0,
},
_ => {
panic!("Uncaught exception: failed to stringify primitive");
},
}
};
error!(
"Error at {}:{}:{} {}",
error_info.filename, error_info.lineno, error_info.column, error_info.message
);
#[cfg(feature = "js_backtrace")]
{
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
if let Some((js_backtrace, rust_backtrace)) = backtrace.borrow_mut().take() {
if let Some(stack) = js_backtrace {
eprintln!("JS backtrace:\n{}", stack);
}
eprintln!("Rust backtrace:\n{}", rust_backtrace);
}
});
}
if dispatch_event {
GlobalScope::from_context(cx).report_an_error(error_info, value.handle());
}
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub unsafe fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) {
debug_assert!(!JS_IsExceptionPending(cx));
let error = format!(
"\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id)
);
throw_type_error(cx, &error);
}
impl Error {
/// Convert this error value to a JS value, consuming it in the process.
pub unsafe fn to_jsval(
self,
cx: *mut JSContext,
global: &GlobalScope,
rval: MutableHandleValue,
) {
assert!(!JS_IsExceptionPending(cx));
throw_dom_exception(cx, global, self);
assert!(JS_IsExceptionPending(cx));
assert!(JS_GetPendingException(cx, rval));
JS_ClearPendingException(cx);
}
}
| throw_dom_exception | identifier_name |
error.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
#[cfg(feature = "js_backtrace")]
use backtrace::Backtrace;
#[cfg(feature = "js_backtrace")]
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods;
use dom::bindings::codegen::PrototypeList::proto_id_to_name;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible};
use dom::bindings::conversions::root_from_object;
use dom::bindings::str::USVString;
use dom::domexception::{DOMErrorName, DOMException};
use dom::globalscope::GlobalScope;
use js::error::{throw_range_error, throw_type_error};
use js::jsapi::JSContext;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_IsExceptionPending;
use js::jsval::UndefinedValue;
use js::rust::HandleObject;
use js::rust::MutableHandleValue;
use js::rust::wrappers::JS_ErrorFromException;
use js::rust::wrappers::JS_GetPendingException;
use js::rust::wrappers::JS_SetPendingException;
use libc::c_uint;
use std::slice::from_raw_parts;
/// An optional stringified JS backtrace and stringified native backtrace from
/// the last DOM exception that was reported.
#[cfg(feature = "js_backtrace")]
thread_local!(static LAST_EXCEPTION_BACKTRACE: DomRefCell<Option<(Option<String>, String)>> = DomRefCell::new(None));
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// InvalidModificationError DOMException
InvalidModification,
/// NotReadableError DOMException
NotReadable,
/// TypeError JavaScript Error
Type(String),
/// RangeError JavaScript Error
Range(String),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub unsafe fn throw_dom_exception(cx: *mut JSContext, global: &GlobalScope, result: Error) {
#[cfg(feature = "js_backtrace")]
{
capture_stack!(in(cx) let stack);
let js_stack = stack.and_then(|s| s.as_string(None));
let rust_stack = Backtrace::new();
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
*backtrace.borrow_mut() = Some((js_stack, format!("{:?}", rust_stack)));
});
}
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::InvalidModification => DOMErrorName::InvalidModificationError,
Error::NotReadable => DOMErrorName::NotReadableError,
Error::Type(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(JS_IsExceptionPending(cx));
return;
},
};
assert!(!JS_IsExceptionPending(cx));
let exception = DOMException::new(global, code);
rooted!(in(cx) let mut thrown = UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
JS_SetPendingException(cx, thrown.handle());
}
/// A struct encapsulating information about a runtime script error.
pub struct ErrorInfo {
/// The error message.
pub message: String,
/// The file name.
pub filename: String,
/// The line number.
pub lineno: c_uint,
/// The column number.
pub column: c_uint,
}
impl ErrorInfo {
unsafe fn from_native_error(cx: *mut JSContext, object: HandleObject) -> Option<ErrorInfo> {
let report = JS_ErrorFromException(cx, object);
if report.is_null() {
return None;
}
let filename = {
let filename = (*report)._base.filename as *const u8;
if !filename.is_null() {
let length = (0..).find(|idx| *filename.offset(*idx) == 0).unwrap();
let filename = from_raw_parts(filename, length as usize);
String::from_utf8_lossy(filename).into_owned()
} else {
"none".to_string()
}
};
let lineno = (*report)._base.lineno;
let column = (*report)._base.column;
let message = {
let message = (*report)._base.message_.data_ as *const u8;
let length = (0..).find(|idx| *message.offset(*idx) == 0).unwrap();
let message = from_raw_parts(message, length as usize);
String::from_utf8_lossy(message).into_owned()
};
Some(ErrorInfo {
filename: filename,
message: message,
lineno: lineno,
column: column,
})
}
fn from_dom_exception(object: HandleObject) -> Option<ErrorInfo> {
let exception = match root_from_object::<DOMException>(object.get()) {
Ok(exception) => exception,
Err(_) => return None,
};
Some(ErrorInfo {
filename: "".to_string(),
message: exception.Stringifier().into(),
lineno: 0,
column: 0,
})
}
}
/// Report a pending exception, thereby clearing it.
///
/// The `dispatch_event` argument is temporary and non-standard; passing false
/// prevents dispatching the `error` event.
pub unsafe fn report_pending_exception(cx: *mut JSContext, dispatch_event: bool) {
if !JS_IsExceptionPending(cx) {
return;
}
rooted!(in(cx) let mut value = UndefinedValue());
if !JS_GetPendingException(cx, value.handle_mut()) {
JS_ClearPendingException(cx);
error!("Uncaught exception: JS_GetPendingException failed");
return;
}
JS_ClearPendingException(cx);
let error_info = if value.is_object() {
rooted!(in(cx) let object = value.to_object());
ErrorInfo::from_native_error(cx, object.handle())
.or_else(|| ErrorInfo::from_dom_exception(object.handle()))
.unwrap_or_else(|| ErrorInfo {
message: format!("uncaught exception: unknown (can't convert to string)"),
filename: String::new(),
lineno: 0,
column: 0,
})
} else {
match USVString::from_jsval(cx, value.handle(), ()) {
Ok(ConversionResult::Success(USVString(string))) => ErrorInfo {
message: format!("uncaught exception: {}", string),
filename: String::new(),
lineno: 0,
column: 0,
},
_ => {
panic!("Uncaught exception: failed to stringify primitive");
},
}
};
error!(
"Error at {}:{}:{} {}",
error_info.filename, error_info.lineno, error_info.column, error_info.message
);
#[cfg(feature = "js_backtrace")]
{
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
if let Some((js_backtrace, rust_backtrace)) = backtrace.borrow_mut().take() {
if let Some(stack) = js_backtrace {
eprintln!("JS backtrace:\n{}", stack);
}
eprintln!("Rust backtrace:\n{}", rust_backtrace);
}
});
}
if dispatch_event {
GlobalScope::from_context(cx).report_an_error(error_info, value.handle());
}
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub unsafe fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) |
impl Error {
/// Convert this error value to a JS value, consuming it in the process.
pub unsafe fn to_jsval(
self,
cx: *mut JSContext,
global: &GlobalScope,
rval: MutableHandleValue,
) {
assert!(!JS_IsExceptionPending(cx));
throw_dom_exception(cx, global, self);
assert!(JS_IsExceptionPending(cx));
assert!(JS_GetPendingException(cx, rval));
JS_ClearPendingException(cx);
}
}
| {
debug_assert!(!JS_IsExceptionPending(cx));
let error = format!(
"\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id)
);
throw_type_error(cx, &error);
} | identifier_body |
error.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
#[cfg(feature = "js_backtrace")]
use backtrace::Backtrace;
#[cfg(feature = "js_backtrace")]
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods;
use dom::bindings::codegen::PrototypeList::proto_id_to_name;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible};
use dom::bindings::conversions::root_from_object;
use dom::bindings::str::USVString;
use dom::domexception::{DOMErrorName, DOMException};
use dom::globalscope::GlobalScope;
use js::error::{throw_range_error, throw_type_error};
use js::jsapi::JSContext;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_IsExceptionPending;
use js::jsval::UndefinedValue;
use js::rust::HandleObject;
use js::rust::MutableHandleValue;
use js::rust::wrappers::JS_ErrorFromException;
use js::rust::wrappers::JS_GetPendingException;
use js::rust::wrappers::JS_SetPendingException;
use libc::c_uint;
use std::slice::from_raw_parts;
/// An optional stringified JS backtrace and stringified native backtrace from
/// the last DOM exception that was reported.
#[cfg(feature = "js_backtrace")]
thread_local!(static LAST_EXCEPTION_BACKTRACE: DomRefCell<Option<(Option<String>, String)>> = DomRefCell::new(None));
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// InvalidModificationError DOMException
InvalidModification,
/// NotReadableError DOMException
NotReadable,
/// TypeError JavaScript Error
Type(String),
/// RangeError JavaScript Error
Range(String),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub unsafe fn throw_dom_exception(cx: *mut JSContext, global: &GlobalScope, result: Error) {
#[cfg(feature = "js_backtrace")]
{
capture_stack!(in(cx) let stack);
let js_stack = stack.and_then(|s| s.as_string(None));
let rust_stack = Backtrace::new();
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
*backtrace.borrow_mut() = Some((js_stack, format!("{:?}", rust_stack)));
});
}
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::InvalidModification => DOMErrorName::InvalidModificationError,
Error::NotReadable => DOMErrorName::NotReadableError,
Error::Type(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(JS_IsExceptionPending(cx));
return;
},
};
assert!(!JS_IsExceptionPending(cx));
let exception = DOMException::new(global, code);
rooted!(in(cx) let mut thrown = UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
JS_SetPendingException(cx, thrown.handle());
}
/// A struct encapsulating information about a runtime script error.
pub struct ErrorInfo {
/// The error message.
pub message: String,
/// The file name.
pub filename: String,
/// The line number.
pub lineno: c_uint,
/// The column number.
pub column: c_uint,
}
impl ErrorInfo {
unsafe fn from_native_error(cx: *mut JSContext, object: HandleObject) -> Option<ErrorInfo> {
let report = JS_ErrorFromException(cx, object);
if report.is_null() |
let filename = {
let filename = (*report)._base.filename as *const u8;
if !filename.is_null() {
let length = (0..).find(|idx| *filename.offset(*idx) == 0).unwrap();
let filename = from_raw_parts(filename, length as usize);
String::from_utf8_lossy(filename).into_owned()
} else {
"none".to_string()
}
};
let lineno = (*report)._base.lineno;
let column = (*report)._base.column;
let message = {
let message = (*report)._base.message_.data_ as *const u8;
let length = (0..).find(|idx| *message.offset(*idx) == 0).unwrap();
let message = from_raw_parts(message, length as usize);
String::from_utf8_lossy(message).into_owned()
};
Some(ErrorInfo {
filename: filename,
message: message,
lineno: lineno,
column: column,
})
}
fn from_dom_exception(object: HandleObject) -> Option<ErrorInfo> {
let exception = match root_from_object::<DOMException>(object.get()) {
Ok(exception) => exception,
Err(_) => return None,
};
Some(ErrorInfo {
filename: "".to_string(),
message: exception.Stringifier().into(),
lineno: 0,
column: 0,
})
}
}
/// Report a pending exception, thereby clearing it.
///
/// The `dispatch_event` argument is temporary and non-standard; passing false
/// prevents dispatching the `error` event.
pub unsafe fn report_pending_exception(cx: *mut JSContext, dispatch_event: bool) {
if !JS_IsExceptionPending(cx) {
return;
}
rooted!(in(cx) let mut value = UndefinedValue());
if !JS_GetPendingException(cx, value.handle_mut()) {
JS_ClearPendingException(cx);
error!("Uncaught exception: JS_GetPendingException failed");
return;
}
JS_ClearPendingException(cx);
let error_info = if value.is_object() {
rooted!(in(cx) let object = value.to_object());
ErrorInfo::from_native_error(cx, object.handle())
.or_else(|| ErrorInfo::from_dom_exception(object.handle()))
.unwrap_or_else(|| ErrorInfo {
message: format!("uncaught exception: unknown (can't convert to string)"),
filename: String::new(),
lineno: 0,
column: 0,
})
} else {
match USVString::from_jsval(cx, value.handle(), ()) {
Ok(ConversionResult::Success(USVString(string))) => ErrorInfo {
message: format!("uncaught exception: {}", string),
filename: String::new(),
lineno: 0,
column: 0,
},
_ => {
panic!("Uncaught exception: failed to stringify primitive");
},
}
};
error!(
"Error at {}:{}:{} {}",
error_info.filename, error_info.lineno, error_info.column, error_info.message
);
#[cfg(feature = "js_backtrace")]
{
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
if let Some((js_backtrace, rust_backtrace)) = backtrace.borrow_mut().take() {
if let Some(stack) = js_backtrace {
eprintln!("JS backtrace:\n{}", stack);
}
eprintln!("Rust backtrace:\n{}", rust_backtrace);
}
});
}
if dispatch_event {
GlobalScope::from_context(cx).report_an_error(error_info, value.handle());
}
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub unsafe fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) {
debug_assert!(!JS_IsExceptionPending(cx));
let error = format!(
"\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id)
);
throw_type_error(cx, &error);
}
impl Error {
/// Convert this error value to a JS value, consuming it in the process.
pub unsafe fn to_jsval(
self,
cx: *mut JSContext,
global: &GlobalScope,
rval: MutableHandleValue,
) {
assert!(!JS_IsExceptionPending(cx));
throw_dom_exception(cx, global, self);
assert!(JS_IsExceptionPending(cx));
assert!(JS_GetPendingException(cx, rval));
JS_ClearPendingException(cx);
}
}
| {
return None;
} | conditional_block |
error.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities to throw exceptions from Rust bindings.
#[cfg(feature = "js_backtrace")]
use backtrace::Backtrace;
#[cfg(feature = "js_backtrace")]
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::DOMExceptionBinding::DOMExceptionMethods;
use dom::bindings::codegen::PrototypeList::proto_id_to_name;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, ToJSValConvertible};
use dom::bindings::conversions::root_from_object;
use dom::bindings::str::USVString;
use dom::domexception::{DOMErrorName, DOMException};
use dom::globalscope::GlobalScope;
use js::error::{throw_range_error, throw_type_error};
use js::jsapi::JSContext;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_IsExceptionPending;
use js::jsval::UndefinedValue;
use js::rust::HandleObject;
use js::rust::MutableHandleValue;
use js::rust::wrappers::JS_ErrorFromException;
use js::rust::wrappers::JS_GetPendingException;
use js::rust::wrappers::JS_SetPendingException;
use libc::c_uint;
use std::slice::from_raw_parts;
/// An optional stringified JS backtrace and stringified native backtrace from
/// the last DOM exception that was reported.
#[cfg(feature = "js_backtrace")]
thread_local!(static LAST_EXCEPTION_BACKTRACE: DomRefCell<Option<(Option<String>, String)>> = DomRefCell::new(None));
/// DOM exceptions that can be thrown by a native DOM method.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum Error {
/// IndexSizeError DOMException
IndexSize,
/// NotFoundError DOMException
NotFound,
/// HierarchyRequestError DOMException
HierarchyRequest,
/// WrongDocumentError DOMException
WrongDocument,
/// InvalidCharacterError DOMException
InvalidCharacter,
/// NotSupportedError DOMException
NotSupported,
/// InUseAttributeError DOMException
InUseAttribute,
/// InvalidStateError DOMException
InvalidState,
/// SyntaxError DOMException
Syntax,
/// NamespaceError DOMException
Namespace,
/// InvalidAccessError DOMException
InvalidAccess,
/// SecurityError DOMException
Security,
/// NetworkError DOMException
Network,
/// AbortError DOMException
Abort,
/// TimeoutError DOMException
Timeout,
/// InvalidNodeTypeError DOMException
InvalidNodeType,
/// DataCloneError DOMException
DataClone,
/// NoModificationAllowedError DOMException
NoModificationAllowed,
/// QuotaExceededError DOMException
QuotaExceeded,
/// TypeMismatchError DOMException
TypeMismatch,
/// InvalidModificationError DOMException
InvalidModification,
/// NotReadableError DOMException
NotReadable,
/// TypeError JavaScript Error
Type(String),
/// RangeError JavaScript Error
Range(String),
/// A JavaScript exception is already pending.
JSFailed,
}
/// The return type for IDL operations that can throw DOM exceptions.
pub type Fallible<T> = Result<T, Error>;
/// The return type for IDL operations that can throw DOM exceptions and
/// return `()`.
pub type ErrorResult = Fallible<()>;
/// Set a pending exception for the given `result` on `cx`.
pub unsafe fn throw_dom_exception(cx: *mut JSContext, global: &GlobalScope, result: Error) {
#[cfg(feature = "js_backtrace")]
{
capture_stack!(in(cx) let stack);
let js_stack = stack.and_then(|s| s.as_string(None));
let rust_stack = Backtrace::new();
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
*backtrace.borrow_mut() = Some((js_stack, format!("{:?}", rust_stack)));
});
}
let code = match result {
Error::IndexSize => DOMErrorName::IndexSizeError,
Error::NotFound => DOMErrorName::NotFoundError,
Error::HierarchyRequest => DOMErrorName::HierarchyRequestError,
Error::WrongDocument => DOMErrorName::WrongDocumentError,
Error::InvalidCharacter => DOMErrorName::InvalidCharacterError,
Error::NotSupported => DOMErrorName::NotSupportedError,
Error::InUseAttribute => DOMErrorName::InUseAttributeError,
Error::InvalidState => DOMErrorName::InvalidStateError,
Error::Syntax => DOMErrorName::SyntaxError,
Error::Namespace => DOMErrorName::NamespaceError,
Error::InvalidAccess => DOMErrorName::InvalidAccessError,
Error::Security => DOMErrorName::SecurityError,
Error::Network => DOMErrorName::NetworkError,
Error::Abort => DOMErrorName::AbortError,
Error::Timeout => DOMErrorName::TimeoutError,
Error::InvalidNodeType => DOMErrorName::InvalidNodeTypeError,
Error::DataClone => DOMErrorName::DataCloneError,
Error::NoModificationAllowed => DOMErrorName::NoModificationAllowedError,
Error::QuotaExceeded => DOMErrorName::QuotaExceededError,
Error::TypeMismatch => DOMErrorName::TypeMismatchError,
Error::InvalidModification => DOMErrorName::InvalidModificationError,
Error::NotReadable => DOMErrorName::NotReadableError,
Error::Type(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_type_error(cx, &message);
return;
},
Error::Range(message) => {
assert!(!JS_IsExceptionPending(cx));
throw_range_error(cx, &message);
return;
},
Error::JSFailed => {
assert!(JS_IsExceptionPending(cx));
return;
},
};
assert!(!JS_IsExceptionPending(cx));
let exception = DOMException::new(global, code);
rooted!(in(cx) let mut thrown = UndefinedValue());
exception.to_jsval(cx, thrown.handle_mut());
JS_SetPendingException(cx, thrown.handle());
}
/// A struct encapsulating information about a runtime script error.
pub struct ErrorInfo {
/// The error message.
pub message: String,
/// The file name.
pub filename: String,
/// The line number.
pub lineno: c_uint,
/// The column number.
pub column: c_uint,
}
impl ErrorInfo {
unsafe fn from_native_error(cx: *mut JSContext, object: HandleObject) -> Option<ErrorInfo> {
let report = JS_ErrorFromException(cx, object);
if report.is_null() {
return None;
}
let filename = { | let filename = from_raw_parts(filename, length as usize);
String::from_utf8_lossy(filename).into_owned()
} else {
"none".to_string()
}
};
let lineno = (*report)._base.lineno;
let column = (*report)._base.column;
let message = {
let message = (*report)._base.message_.data_ as *const u8;
let length = (0..).find(|idx| *message.offset(*idx) == 0).unwrap();
let message = from_raw_parts(message, length as usize);
String::from_utf8_lossy(message).into_owned()
};
Some(ErrorInfo {
filename: filename,
message: message,
lineno: lineno,
column: column,
})
}
fn from_dom_exception(object: HandleObject) -> Option<ErrorInfo> {
let exception = match root_from_object::<DOMException>(object.get()) {
Ok(exception) => exception,
Err(_) => return None,
};
Some(ErrorInfo {
filename: "".to_string(),
message: exception.Stringifier().into(),
lineno: 0,
column: 0,
})
}
}
/// Report a pending exception, thereby clearing it.
///
/// The `dispatch_event` argument is temporary and non-standard; passing false
/// prevents dispatching the `error` event.
pub unsafe fn report_pending_exception(cx: *mut JSContext, dispatch_event: bool) {
if !JS_IsExceptionPending(cx) {
return;
}
rooted!(in(cx) let mut value = UndefinedValue());
if !JS_GetPendingException(cx, value.handle_mut()) {
JS_ClearPendingException(cx);
error!("Uncaught exception: JS_GetPendingException failed");
return;
}
JS_ClearPendingException(cx);
let error_info = if value.is_object() {
rooted!(in(cx) let object = value.to_object());
ErrorInfo::from_native_error(cx, object.handle())
.or_else(|| ErrorInfo::from_dom_exception(object.handle()))
.unwrap_or_else(|| ErrorInfo {
message: format!("uncaught exception: unknown (can't convert to string)"),
filename: String::new(),
lineno: 0,
column: 0,
})
} else {
match USVString::from_jsval(cx, value.handle(), ()) {
Ok(ConversionResult::Success(USVString(string))) => ErrorInfo {
message: format!("uncaught exception: {}", string),
filename: String::new(),
lineno: 0,
column: 0,
},
_ => {
panic!("Uncaught exception: failed to stringify primitive");
},
}
};
error!(
"Error at {}:{}:{} {}",
error_info.filename, error_info.lineno, error_info.column, error_info.message
);
#[cfg(feature = "js_backtrace")]
{
LAST_EXCEPTION_BACKTRACE.with(|backtrace| {
if let Some((js_backtrace, rust_backtrace)) = backtrace.borrow_mut().take() {
if let Some(stack) = js_backtrace {
eprintln!("JS backtrace:\n{}", stack);
}
eprintln!("Rust backtrace:\n{}", rust_backtrace);
}
});
}
if dispatch_event {
GlobalScope::from_context(cx).report_an_error(error_info, value.handle());
}
}
/// Throw an exception to signal that a `JSObject` can not be converted to a
/// given DOM type.
pub unsafe fn throw_invalid_this(cx: *mut JSContext, proto_id: u16) {
debug_assert!(!JS_IsExceptionPending(cx));
let error = format!(
"\"this\" object does not implement interface {}.",
proto_id_to_name(proto_id)
);
throw_type_error(cx, &error);
}
impl Error {
/// Convert this error value to a JS value, consuming it in the process.
pub unsafe fn to_jsval(
self,
cx: *mut JSContext,
global: &GlobalScope,
rval: MutableHandleValue,
) {
assert!(!JS_IsExceptionPending(cx));
throw_dom_exception(cx, global, self);
assert!(JS_IsExceptionPending(cx));
assert!(JS_GetPendingException(cx, rval));
JS_ClearPendingException(cx);
}
} | let filename = (*report)._base.filename as *const u8;
if !filename.is_null() {
let length = (0..).find(|idx| *filename.offset(*idx) == 0).unwrap(); | random_line_split |
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r'; |
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {}
},
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn description(&self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
} | let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?; | random_line_split |
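The parser above is easiest to follow with a concrete input and a short driver. The sketch below is illustrative only: the ASCII layout mirrors what `parse_lvl` expects (a header block with the collection name and an optional description, then levels separated by blank lines), the accessors are the ones defined above, and the module path plus the `"test_2"` collection name (borrowed from the tests) are assumptions.

// A minimal sketch, not part of the crate: paths and file names are assumptions.
use crate::collection::Collection;

// Shape of the ASCII `.lvl` input that `parse_lvl` expects: name line, optional
// description, then blank-line-separated levels (the puzzle content is made up).
const SAMPLE_LVL: &str = "\
Tiny Collection
Two throwaway levels.

#####
#@$.#
#####

######
#@ $.#
######";

fn collection_summary() {
    // Metadata-only pass: reports the level count without parsing each level.
    if let Ok(meta) = Collection::parse_metadata("test_2") {
        println!("{} ({} levels)", meta.name(), meta.number_of_levels());
    }
    // Full pass: every level is parsed and reachable through `levels()`.
    if let Ok(collection) = Collection::parse("test_2") {
        for _level in collection.levels() {
            // each item is a `crate::level::Level`
        }
    }
}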
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r';
let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?;
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {}
},
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn de | self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
}
| scription(& | identifier_name |
collection.rs | //! This module contains code to parse all supported collection formats.
use std::fs::File;
use std::io::Read;
use crate::level::*;
use crate::util::*;
enum FileFormat {
Ascii,
Xml,
}
/// A collection of levels. This type contains logic for parsing a collection file. Other than
/// that, it is simply a list of Levels together with some metadata.
#[derive(Debug)]
pub struct Collection {
/// The full name of the collection.
name: String,
/// The name of the file containing the level collection.
short_name: String,
description: Option<String>,
number_of_levels: usize,
/// All levels of this collection. This variable is only written to when loading the
/// collection.
levels: Vec<Level>,
}
impl Collection {
#[cfg(test)]
pub fn from_levels(name: &str, levels: &[Level]) -> Collection {
Collection {
name: name.into(),
short_name: name.into(),
description: None,
number_of_levels: levels.len(),
levels: levels.into(),
}
}
/// Load a level set with the given name, whatever the format might be.
pub fn parse(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, true)
}
/// Figure out title, description, number of levels, etc. of a collection without parsing each
/// level.
pub fn parse_metadata(short_name: &str) -> Result<Collection, SokobanError> {
Collection::parse_helper(short_name, false)
}
fn parse_helper(short_name: &str, parse_levels: bool) -> Result<Collection, SokobanError> {
let mut level_path = ASSETS.clone();
level_path.push("levels");
level_path.push(short_name);
let (level_file, file_format) = {
level_path.set_extension("slc");
if let Ok(f) = File::open(&level_path) {
(f, FileFormat::Xml)
} else {
level_path.set_extension("lvl");
match File::open(level_path) {
Ok(f) => (f, FileFormat::Ascii),
Err(e) => return Err(SokobanError::from(e)),
}
}
};
Ok(match file_format {
FileFormat::Ascii => Collection::parse_lvl(short_name, level_file, parse_levels)?,
FileFormat::Xml => Collection::parse_xml(short_name, level_file, parse_levels)?,
})
}
/// Load a file containing a bunch of levels separated by an empty line, i.e. the usual ASCII
/// format.
fn parse_lvl(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
#[cfg(unix)]
const EMPTY_LINE: &str = "\n\n";
#[cfg(windows)]
const EMPTY_LINE: &str = "\r\n\r\n";
let eol = |c| c == '\n' || c == '\r';
let mut file = file;
// Read the collection’s file
let mut content = "".to_string();
file.read_to_string(&mut content)?;
let level_strings: Vec<_> = content
.split(EMPTY_LINE)
.map(|x| x.trim_matches(&eol))
.filter(|x| !x.is_empty())
.collect();
let name = level_strings[0].lines().next().unwrap();
let description = level_strings[0]
.splitn(1, &eol)
.last()
.map(|x| x.trim().to_owned());
// Parse the individual levels
let (num, levels) = {
if parse_levels {
let lvls = level_strings[1..]
.iter()
.enumerate()
.map(|(i, l)| Level::parse(i, l.trim_matches(&eol)))
.collect::<Result<Vec<_>, _>>()?;
(lvls.len(), lvls)
} else {
(level_strings.len() - 1, vec![])
}
};
Ok(Collection {
name: name.to_string(),
short_name: short_name.to_string(),
description,
number_of_levels: num,
levels,
})
}
/// Load a level set in the XML-based .slc format.
fn parse_xml(
short_name: &str,
file: File,
parse_levels: bool,
) -> Result<Collection, SokobanError> {
use quick_xml::events::Event;
use quick_xml::Reader;
enum State {
Nothing,
Title,
Description,
Email,
Url,
Line,
}
let file = ::std::io::BufReader::new(file);
let mut reader = Reader::from_reader(file);
let mut state = State::Nothing;
// Collection attributes
let mut title = String::new();
let mut description = String::new();
let mut email = String::new();
let mut url = String::new();
let mut levels = vec![];
// Level attributes
let mut num = 0;
let mut level_lines = String::new();
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => match e.name() {
b"Title" => {
state = State::Title;
title.clear();
}
b"Description" => state = State::Description,
b"Email" => state = State::Email,
b"Url" => state = State::Url,
b"Level" => level_lines.clear(),
b"L" => state = State::Line,
_ => {}
},
Ok(Event::End(e)) => match e.name() {
b"Title" | b"Description" | b"Email" | b"Url" => state = State::Nothing,
b"Level" => {
if parse_levels {
levels.push(Level::parse(num, &level_lines)?);
}
num += 1;
}
b"L" => {
state = State::Nothing;
level_lines.push('\n');
}
_ => {} | },
Ok(Event::Text(ref e)) => match state {
State::Nothing => {}
State::Line if !parse_levels => {}
_ => {
let s = e.unescape_and_decode(&reader).unwrap();
match state {
State::Title => title.push_str(&s),
State::Description => description.push_str(&s),
State::Email => email.push_str(&s),
State::Url => url.push_str(&s),
State::Line => level_lines.push_str(&s),
_ => unreachable!(),
}
}
},
Ok(Event::Eof) => break,
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => {}
}
}
Ok(Collection {
name: title,
short_name: short_name.to_string(),
description: if description.is_empty() {
None
} else {
Some(description)
},
number_of_levels: num,
levels,
})
}
// Accessor methods
pub fn name(&self) -> &str {
&self.name
}
pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn description(&self) -> Option<&str> {
match self.description {
Some(ref x) => Some(&x),
None => None,
}
}
pub fn first_level(&self) -> &Level {
&self.levels[0]
}
/// Get all levels.
pub fn levels(&self) -> &[Level] {
self.levels.as_ref()
}
pub fn number_of_levels(&self) -> usize {
self.number_of_levels
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn load_test_collections() {
assert!(Collection::parse("test_2").is_ok());
assert!(Collection::parse_metadata("test_2").is_ok());
assert!(Collection::parse("test3iuntrenutineaniutea").is_err());
assert!(Collection::parse_metadata("test3iuntrenutineaniutea").is_err());
}
}
| conditional_block |
|
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn | () {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap();
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied));
t.join().unwrap();
assert_eq!(data, b"foo bar baz");
}
| chain_clients | identifier_name |
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn chain_clients() {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap();
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied)); | t.join().unwrap();
assert_eq!(data, b"foo bar baz");
} | random_line_split |
|
chain.rs | extern crate futures;
extern crate tokio_core;
use std::net::TcpStream;
use std::thread;
use std::io::{Write, Read};
use futures::Future;
use futures::stream::Stream;
use tokio_core::io::read_to_end;
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
})
}
#[test]
fn chain_clients() |
read_to_end(a.chain(b).chain(c), Vec::new())
});
let (_, data) = t!(l.run(copied));
t.join().unwrap();
assert_eq!(data, b"foo bar baz");
}
| {
let mut l = t!(Core::new());
let srv = t!(TcpListener::bind(&t!("127.0.0.1:0".parse()), &l.handle()));
let addr = t!(srv.local_addr());
let t = thread::spawn(move || {
let mut s1 = TcpStream::connect(&addr).unwrap();
s1.write_all(b"foo ").unwrap();
let mut s2 = TcpStream::connect(&addr).unwrap();
s2.write_all(b"bar ").unwrap();
let mut s3 = TcpStream::connect(&addr).unwrap();
s3.write_all(b"baz").unwrap();
});
let clients = srv.incoming().map(|e| e.0).take(3);
let copied = clients.collect().and_then(|clients| {
let mut clients = clients.into_iter();
let a = clients.next().unwrap();
let b = clients.next().unwrap();
let c = clients.next().unwrap(); | identifier_body |
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this partition.
/// If specified, the serializer will attempt to scramble this partition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit aligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if!word_annotation.contains(&annotations[idx]) |
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| {
word_annotation.push(annotations[idx].clone());
} | conditional_block |
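To make the relationships between the types above concrete, here is a rough usage sketch. It only calls constructors and methods defined in this file; the partition and item names, the image geometry, and the idea that an `LcSecded` instance arrives from elsewhere are assumptions for illustration, not values used by the real OTP image.

// Illustrative only: names, sizes and the LcSecded source are assumptions.
use std::collections::HashMap;

fn build_image_sketch(secded: &crate::otp::lc_state::LcSecded) -> anyhow::Result<Vec<String>> {
    // One 64-byte, software-digested, unscrambled partition with a single 4-byte item.
    let mut partition = VmemPartition::new(
        "EXAMPLE_PARTITION".to_string(),
        64,
        DigestType::Software,
        None, // no key name, so the empty key map below is fine
    );
    partition.push_item(VmemItem::new(
        vec![0xde, 0xad, 0xbe, 0xef],
        0,
        "EXAMPLE_ITEM".to_string(),
    ));

    // 8 data bytes per word and 8 words, so the partition exactly fills the image.
    let image = VmemImage::new(vec![partition], 8, 8);
    image.generate(HashMap::new(), secded)
}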
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this partition.
/// If specified, the serializer will attempt to scramble this partition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) |
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit aligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| {
self.size = size;
} | identifier_body |
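The hardware digest above folds 128-bit message blocks through PRESENT in a Davies–Meyer-style loop and finishes with a dedicated constant. A small sketch of exercising it from a test in the same module follows; the IV and finalization constant are placeholders, not the values the real OTP controller uses.

// Sketch of a same-module test; constants are made up for illustration.
#[cfg(test)]
mod digest_sketch {
    use super::*;

    #[test]
    fn two_block_digest_is_deterministic() {
        let msg = [0x0123_4567_89ab_cdefu64, 0xfedc_ba98_7654_3210u64];
        let iv: DigestIV = 0x0011_2233_4455_6677;
        let fin: DigestCnst = 0x8899_aabb_ccdd_eeff_0102_0304_0506_0708;
        let d1 = present_digest_64(&msg, iv, fin);
        let d2 = present_digest_64(&msg, iv, fin);
        assert_eq!(d1, d2);
    }
}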
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details. | // SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn size(&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this partition.
/// If specified, the serializer will attempt to scramble this partition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit aligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
} | random_line_split |
|
vmem_serialize.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
use crate::otp::lc_state::LcSecded;
use crate::util::present::Present;
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Write;
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::AsBytes;
enum ItemType {
Bytes(Vec<u8>),
Unvalued(usize),
}
/// The hex representation of an OTP item.
pub struct VmemItem {
value: ItemType,
offset: usize,
name: String,
}
impl VmemItem {
pub fn new(bytes: Vec<u8>, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Bytes(bytes),
offset,
name,
}
}
pub fn new_unvalued(size: usize, offset: usize, name: String) -> VmemItem {
VmemItem {
value: ItemType::Unvalued(size),
offset,
name,
}
}
pub fn | (&self) -> usize {
match &self.value {
ItemType::Bytes(b) => b.len(),
ItemType::Unvalued(size) => *size,
}
}
}
pub type DigestIV = u64;
pub type DigestCnst = u128;
/// Digest information for an OTP partition.
#[derive(PartialEq)]
pub enum DigestType {
Unlocked,
Software,
Hardware(DigestIV, DigestCnst),
}
/// The hex representation of an OTP partition.
pub struct VmemPartition {
/// Items associated with this partition.
items: Vec<VmemItem>,
/// The name of this partition.
/// Used in annotations.
name: String,
/// The type of digest used for this partition.
/// For software digests, the value of the digest is provided and appended to the list of
/// items. For hardware digests, we must compute the digest value and append to the list of
/// items.
digest_type: DigestType,
/// Partition size.
size: usize,
/// The key name for this partition.
/// If specified, the serializer will attempt to scramble this partition using the key named in
/// this field.
key_name: Option<String>,
}
impl VmemPartition {
pub fn new(
name: String,
size: usize,
digest_type: DigestType,
key_name: Option<String>,
) -> VmemPartition {
VmemPartition {
items: Vec::new(),
name,
digest_type,
size,
key_name,
}
}
/// Set the size of the partition.
///
/// For partitions that don't specify their size, this is used to set the size of the partition
/// including the digest.
pub fn set_size(&mut self, size: usize) {
self.size = size;
}
/// Add an item to this partition.
pub fn push_item(&mut self, item: VmemItem) {
self.items.push(item);
}
/// Produces a tuple containing OTP HEX lines with annotations.
fn write_to_buffer(&self, keys: &HashMap<String, Vec<u8>>) -> Result<(Vec<u8>, Vec<String>)> {
if self.size % 8 != 0 {
bail!("Partition {} must be 64-bit aligned", self.name);
}
let mut defined = vec![false; self.size];
let mut annotations: Vec<String> = vec!["unallocated".to_owned(); self.size];
let mut data_bytes: Vec<u8> = vec![0; self.size];
for item in &self.items {
let end = item.offset + item.size();
annotations[item.offset..end].fill(format!("{}: {}", self.name, item.name).to_string());
let defined = &mut defined[item.offset..end];
if let Some(collision) = defined.iter().position(|defined| *defined) {
bail!(
"Unexpected item collision with item {} at 0x{:x}",
item.name,
collision
);
}
defined.fill(true);
if let ItemType::Bytes(bytes) = &item.value {
data_bytes[item.offset..end].copy_from_slice(bytes);
}
}
let mut data_blocks = Vec::<u64>::new();
let mut data_blocks_defined = Vec::<bool>::new();
for (k, chunk) in data_bytes.chunks(8).enumerate() {
data_blocks.push(u64::from_le_bytes(chunk.try_into().unwrap()));
let byte_offset = k * 8;
data_blocks_defined.push(
defined[byte_offset..byte_offset + 8]
.iter()
.fold(false, |a, &b| a || b),
);
}
if let Some(key_name) = &self.key_name {
let key = keys
.get(key_name)
.ok_or_else(|| anyhow!("Key not found {}", key_name))?;
let cipher = Present::try_new(key.clone())?;
for i in 0..data_blocks.len() {
if data_blocks_defined[i] {
data_blocks[i] = cipher.encrypt_block(data_blocks[i]);
}
}
}
if let DigestType::Hardware(iv, fin_const) = self.digest_type {
ensure!(
matches!(data_blocks.last(), None | Some(0)),
"Digest of partition {} cannot be overridden manually",
self.name
);
let last = data_blocks.len() - 1;
data_blocks[last] = present_digest_64(&data_blocks[0..last], iv, fin_const);
}
let data = data_blocks.as_bytes().to_vec();
if data.len() != self.size {
Err(anyhow!("Partition {} size mismatch", self.name))
} else {
Ok((data, annotations))
}
}
}
pub struct VmemImage {
partitions: Vec<VmemPartition>,
width: usize,
depth: usize,
}
impl VmemImage {
pub fn new(partitions: Vec<VmemPartition>, width: usize, depth: usize) -> VmemImage {
VmemImage {
partitions,
width,
depth,
}
}
pub fn generate(
&self,
keys: HashMap<String, Vec<u8>>,
secded: &LcSecded,
) -> Result<Vec<String>> {
let mut data: Vec<u8> = vec![0; self.width * self.depth];
let mut annotations: Vec<String> = vec![Default::default(); data.len()];
let mut offset = 0;
for partition in &self.partitions {
let (part_data, part_annotation) = partition.write_to_buffer(&keys)?;
let end = offset + partition.size;
if end > data.len() {
bail!(
"Partition {} out of bounds, ends at 0x{:x}",
partition.name,
end
);
}
data[offset..end].clone_from_slice(&part_data);
annotations[offset..end].clone_from_slice(&part_annotation);
offset += partition.size;
}
let width_ecc = self.width + secded.ecc_byte_len();
let num_words = data.len() / self.width;
let mut output = vec![format!(
"// OTP memory hexfile with {} x {}bit layout",
self.depth,
width_ecc * 8
)];
for i in 0..num_words {
let mut word = Vec::<u8>::new();
let mut word_annotation = Vec::<String>::new();
for j in 0..self.width {
let idx = i * self.width + j;
word.push(data[idx]);
if !word_annotation.contains(&annotations[idx]) {
word_annotation.push(annotations[idx].clone());
}
}
let word_with_ecc = secded.ecc_encode(word)?;
let mut word_str = String::new();
for byte in word_with_ecc.iter().rev() {
write!(word_str, "{:02x}", byte)?;
}
output.push(format!(
"{} // {:06x}: {}",
word_str,
i * self.width,
word_annotation.join(", ")
));
}
Ok(output)
}
}
fn present_digest_64(message: &[u64], iv: DigestIV, fin_const: DigestCnst) -> u64 {
let mut state = iv;
for i in (0..message.len() + 2).step_by(2) {
let b128: [u8; 16] = if i + 1 < message.len() {
(message[i] as u128) << 64 | message[i + 1] as u128
} else if i < message.len() {
(message[i] as u128) << 64 | message[i] as u128
} else {
fin_const
}
.as_bytes()
.try_into()
.unwrap();
let cipher = Present::new_128(&b128);
state ^= cipher.encrypt_block(state);
}
state
}
| size | identifier_name |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn | () -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // record.1 > now in this branch, so subtract in this order
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() {
action();
}
}
}
| new | identifier_name |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // record.1 > now in this branch, so subtract in this order
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() |
}
}
| {
action();
} | conditional_block |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = now - record.1; | } else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) {
if let Some(action) = self.take() {
action();
}
}
} | let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0; | random_line_split |
event_loop.rs | use std::sync::{Arc, Condvar, Mutex};
use std::thread::spawn;
use std::time::{Duration, Instant};
use super::schedule_queue::*;
use super::scheduler::*;
#[derive(Clone)]
pub struct EventLoop {
queue: Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>,
}
impl EventLoop {
/// Creates a new EventLoop
pub fn new() -> Self {
let queue = Arc::new((Mutex::new(ScheduleQueue::new()), Condvar::new()));
let scheduler = EventLoop { queue: queue.clone() };
spawn(move || {
loop {
let mut action = dequeue(&queue);
action.invoke();
}
});
scheduler
}
}
fn dequeue(queue: &Arc<(Mutex<ScheduleQueue<Box<Action + Send>>>, Condvar)>) -> Box<Action> {
let (ref mutex, ref cvar) = **queue;
let mut queue = mutex.lock().unwrap();
loop {
if let Some(record) = queue.dequeue() {
let now = Instant::now();
if record.1 <= now {
return record.0;
} else {
let timeout = record.1 - now; // record.1 > now in this branch, so subtract in this order
let r = cvar.wait_timeout(queue, timeout).unwrap();
queue = r.0;
if r.1.timed_out() {
return record.0;
} else {
queue.enqueue(record);
continue;
}
}
} else {
queue = cvar.wait(queue).unwrap();
}
}
}
impl ParallelScheduler for EventLoop {
fn schedule<F>(&self, func: F, delay: Duration)
where F: FnOnce() + Send + 'static
{
let due = Instant::now() + delay;
let &(ref mutex, ref cvar) = &*self.queue;
mutex.lock().unwrap().enqueue((Box::new(Some(func)), due));
cvar.notify_one();
}
}
trait Action {
fn invoke(&mut self);
}
impl<F> Action for Option<F>
where F: FnOnce() + Send
{
fn invoke(&mut self) |
}
| {
if let Some(action) = self.take() {
action();
}
} | identifier_body |
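A small sketch of driving the scheduler defined above. It assumes `EventLoop` and the `ParallelScheduler` trait (which supplies `schedule`) can be imported from the surrounding crate; the delays and the closing sleep are illustrative.

// Illustrative only: module paths are assumptions, and ParallelScheduler must be
// in scope for `schedule` to resolve.
use std::thread::sleep;
use std::time::Duration;

fn fire_two_tasks(scheduler: &EventLoop) {
    // `schedule` just enqueues the closure and wakes the worker thread; it never blocks.
    scheduler.schedule(|| println!("due in ~50 ms"), Duration::from_millis(50));
    scheduler.schedule(|| println!("due immediately"), Duration::from_millis(0));
    // Give the background thread a moment to drain the queue before returning.
    sleep(Duration::from_millis(100));
}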
builder.rs | //! Cretonne instruction builder.
//!
//! A `Builder` provides a convenient interface for inserting instructions into a Cretonne
//! function. Many of its methods are generated from the meta language instruction definitions.
use ir;
use ir::types;
use ir::{InstructionData, DataFlowGraph};
use ir::{Opcode, Type, Inst, Value};
use isa;
/// Base trait for instruction builders.
///
/// The `InstBuilderBase` trait provides the basic functionality required by the methods of the
/// generated `InstBuilder` trait. These methods should not normally be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
// Make an `Iterator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) |
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type(cmp), B1);
}
#[test]
fn reuse_results() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
let v0 = pos.ins().iadd_imm(arg0, 17);
assert_eq!(pos.func.dfg.value_type(v0), I32);
let iadd = pos.prev_inst().unwrap();
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iadd, 0));
// Detach v0 and reuse it for a different instruction.
pos.func.dfg.clear_results(iadd);
let v0b = pos.ins().with_result(v0).iconst(I32, 3);
assert_eq!(v0, v0b);
assert_eq!(pos.current_inst(), Some(iadd));
let iconst = pos.prev_inst().unwrap();
assert!(iadd != iconst);
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iconst, 0));
}
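    // Illustrative addition, not in the original file: a sketch of driving
    // `ReplaceBuilder`. It assumes `DataFlowGraph::replace(inst)` is the usual
    // entry point returning a `ReplaceBuilder`; the exact helper name may differ.
    #[test]
    fn replace_keeps_results() {
        let mut func = Function::new();
        let ebb0 = func.dfg.make_ebb();
        let mut pos = FuncCursor::new(&mut func);
        pos.insert_ebb(ebb0);
        let v0 = pos.ins().iconst(I32, 3);
        let inst = pos.prev_inst().unwrap();
        // Overwrite the instruction in place; the `Inst` number and the attached
        // result value are preserved by `ReplaceBuilder::build`.
        pos.func.dfg.replace(inst).iconst(I32, 4);
        assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(inst, 0));
        assert_eq!(pos.func.dfg.value_type(v0), I32);
    }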
}
| {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
} | conditional_block |
builder.rs | //! Cretonne instruction builder.
//!
//! A `Builder` provides a convenient interface for inserting instructions into a Cretonne
//! function. Many of its methods are generated from the meta language instruction definitions.
use ir;
use ir::types;
use ir::{InstructionData, DataFlowGraph};
use ir::{Opcode, Type, Inst, Value};
use isa;
/// Base trait for instruction builders.
///
/// The `InstBuilderBase` trait provides the basic functionality required by the methods of the
/// generated `InstBuilder` trait. These methods should not normally be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst; | inst = dfg.make_inst(data);
// Make an `Iterator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
}
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type(cmp), B1);
}
#[test]
fn reuse_results() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
let v0 = pos.ins().iadd_imm(arg0, 17);
assert_eq!(pos.func.dfg.value_type(v0), I32);
let iadd = pos.prev_inst().unwrap();
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iadd, 0));
// Detach v0 and reuse it for a different instruction.
pos.func.dfg.clear_results(iadd);
let v0b = pos.ins().with_result(v0).iconst(I32, 3);
assert_eq!(v0, v0b);
assert_eq!(pos.current_inst(), Some(iadd));
let iconst = pos.prev_inst().unwrap();
assert!(iadd != iconst);
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iconst, 0));
}
} | {
let dfg = self.inserter.data_flow_graph_mut(); | random_line_split |
builder.rs | //! Cretonne instruction builder.
//!
//! A `Builder` provides a convenient interface for inserting instructions into a Cretonne
//! function. Many of its methods are generated from the meta language instruction definitions.
use ir;
use ir::types;
use ir::{InstructionData, DataFlowGraph};
use ir::{Opcode, Type, Inst, Value};
use isa;
/// Base trait for instruction builders.
///
/// The `InstBuilderBase` trait provides the basic functionality required by the methods of the
/// generated `InstBuilder` trait. These methods should not normally be used directly. Use the
/// methods in the `InstBuilder` trait instead.
///
/// Any data type that implements `InstBuilderBase` also gets all the methods of the `InstBuilder`
/// trait.
pub trait InstBuilderBase<'f>: Sized {
/// Get an immutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph that will hold the constructed
/// instructions.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert an instruction and return a reference to it, consuming the builder.
///
/// The result types may depend on a controlling type variable. For non-polymorphic
/// instructions with multiple results, pass `VOID` for the `ctrl_typevar` argument.
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph);
}
// Include trait code generated by `lib/cretonne/meta/gen_instr.py`.
//
// This file defines the `InstBuilder` trait as an extension of `InstBuilderBase` with methods per
// instruction format and per opcode.
include!(concat!(env!("OUT_DIR"), "/inst_builder.rs"));
/// Any type implementing `InstBuilderBase` gets all the `InstBuilder` methods for free.
impl<'f, T: InstBuilderBase<'f>> InstBuilder<'f> for T {}
/// Base trait for instruction inserters.
///
/// This is an alternative base trait for an instruction builder to implement.
///
/// An instruction inserter can be adapted into an instruction builder by wrapping it in an
/// `InsertBuilder`. This provides some common functionality for instruction builders that insert
/// new instructions, as opposed to the `ReplaceBuilder` which overwrites existing instructions.
pub trait InstInserterBase<'f>: Sized {
/// Get an immutable reference to the data flow graph.
fn data_flow_graph(&self) -> &DataFlowGraph;
/// Get a mutable reference to the data flow graph.
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph;
/// Insert a new instruction which belongs to the DFG.
fn insert_built_inst(self, inst: Inst, ctrl_typevar: Type) -> &'f mut DataFlowGraph;
}
use std::marker::PhantomData;
/// Builder that inserts an instruction at the current position.
///
/// An `InsertBuilder` is a wrapper for an `InstInserterBase` that turns it into an instruction
/// builder with some additional facilities for creating instructions that reuse existing values as
/// their results.
pub struct InsertBuilder<'f, IIB: InstInserterBase<'f>> {
inserter: IIB,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
/// Create a new builder which inserts instructions at `pos`.
/// The `dfg` and `pos.layout` references should be from the same `Function`.
pub fn new(inserter: IIB) -> InsertBuilder<'f, IIB> {
InsertBuilder {
inserter,
unused: PhantomData,
}
}
/// Reuse result values in `reuse`.
///
/// Convert this builder into one that will reuse the provided result values instead of
/// allocating new ones. The provided values for reuse must not be attached to anything. Any
/// missing result values will be allocated as normal.
///
/// The `reuse` argument is expected to be an array of `Option<Value>`.
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
where
Array: AsRef<[Option<Value>]>,
{
InsertReuseBuilder {
inserter: self.inserter,
reuse,
unused: PhantomData,
}
}
/// Reuse a single result value.
///
/// Convert this into a builder that will reuse `v` as the single result value. The reused
/// result value `v` must not be attached to anything.
///
/// This method should only be used when building an instruction with exactly one result. Use
/// `with_results()` for the more general case.
pub fn with_result(self, v: Value) -> InsertReuseBuilder<'f, IIB, [Option<Value>; 1]> {
// TODO: Specialize this to return a different builder that just attaches `v` instead of
// calling `make_inst_results_reusing()`.
self.with_results([Some(v)])
}
}
impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, IIB> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
dfg.make_inst_results(inst, ctrl_typevar);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
pub struct InsertReuseBuilder<'f, IIB, Array>
where
IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>,
{
inserter: IIB,
reuse: Array,
unused: PhantomData<&'f u32>,
}
impl<'f, IIB, Array> InstBuilderBase<'f> for InsertReuseBuilder<'f, IIB, Array>
where IIB: InstInserterBase<'f>,
Array: AsRef<[Option<Value>]>
{
fn | (&self) -> &DataFlowGraph {
self.inserter.data_flow_graph()
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.inserter.data_flow_graph_mut()
}
fn build(mut self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
let inst;
{
let dfg = self.inserter.data_flow_graph_mut();
inst = dfg.make_inst(data);
// Make an `Iterator<Item = Option<Value>>`.
let ru = self.reuse.as_ref().iter().cloned();
dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
}
(inst, self.inserter.insert_built_inst(inst, ctrl_typevar))
}
}
/// Instruction builder that replaces an existing instruction.
///
/// The inserted instruction will have the same `Inst` number as the old one.
///
/// If the old instruction still has result values attached, it is assumed that the new instruction
/// produces the same number and types of results. The old result values are preserved. If the
/// replacement instruction format does not support multiple results, the builder panics. It is a
/// bug to leave result values dangling.
pub struct ReplaceBuilder<'f> {
dfg: &'f mut DataFlowGraph,
inst: Inst,
}
impl<'f> ReplaceBuilder<'f> {
/// Create a `ReplaceBuilder` that will overwrite `inst`.
pub fn new(dfg: &'f mut DataFlowGraph, inst: Inst) -> ReplaceBuilder {
ReplaceBuilder { dfg, inst }
}
}
impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> {
fn data_flow_graph(&self) -> &DataFlowGraph {
self.dfg
}
fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
self.dfg
}
fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'f mut DataFlowGraph) {
// Splat the new instruction on top of the old one.
self.dfg[self.inst] = data;
if !self.dfg.has_results(self.inst) {
// The old result values were either detached or non-existent.
// Construct new ones.
self.dfg.make_inst_results(self.inst, ctrl_typevar);
}
(self.inst, self.dfg)
}
}
#[cfg(test)]
mod tests {
use cursor::{Cursor, FuncCursor};
use ir::{Function, InstBuilder, ValueDef};
use ir::types::*;
use ir::condcodes::*;
#[test]
fn types() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
// Explicit types.
let v0 = pos.ins().iconst(I32, 3);
assert_eq!(pos.func.dfg.value_type(v0), I32);
// Inferred from inputs.
let v1 = pos.ins().iadd(arg0, v0);
assert_eq!(pos.func.dfg.value_type(v1), I32);
// Formula.
let cmp = pos.ins().icmp(IntCC::Equal, arg0, v0);
assert_eq!(pos.func.dfg.value_type(cmp), B1);
}
#[test]
fn reuse_results() {
let mut func = Function::new();
let ebb0 = func.dfg.make_ebb();
let arg0 = func.dfg.append_ebb_param(ebb0, I32);
let mut pos = FuncCursor::new(&mut func);
pos.insert_ebb(ebb0);
let v0 = pos.ins().iadd_imm(arg0, 17);
assert_eq!(pos.func.dfg.value_type(v0), I32);
let iadd = pos.prev_inst().unwrap();
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iadd, 0));
// Detach v0 and reuse it for a different instruction.
pos.func.dfg.clear_results(iadd);
let v0b = pos.ins().with_result(v0).iconst(I32, 3);
assert_eq!(v0, v0b);
assert_eq!(pos.current_inst(), Some(iadd));
let iconst = pos.prev_inst().unwrap();
assert!(iadd != iconst);
assert_eq!(pos.func.dfg.value_def(v0), ValueDef::Result(iconst, 0));
}
}
| data_flow_graph | identifier_name |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape),..} =>
break 'mainloop,
Event::MouseButtonDown {x, y,..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn | () -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
}
| main | identifier_name |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path; | use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape),..} =>
break 'mainloop,
Event::MouseButtonDown {x, y,..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn main() -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
} | use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor; | random_line_split |
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape),..} =>
break 'mainloop,
Event::MouseButtonDown {x, y,..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => {}
}
}
}
Ok(())
}
fn main() -> Result<(), String> | {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
} | identifier_body |
|
cursor.rs | extern crate sdl2;
use std::env;
use std::path::Path;
use sdl2::event::Event;
use sdl2::image::{LoadSurface, InitFlag};
use sdl2::keyboard::Keycode;
use sdl2::mouse::Cursor;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::surface::Surface;
pub fn run(png: &Path) -> Result<(), String> {
let sdl_context = sdl2::init()?;
let video_subsystem = sdl_context.video()?;
let _image_context = sdl2::image::init(InitFlag::PNG | InitFlag::JPG)?;
let window = video_subsystem.window("rust-sdl2 demo: Cursor", 800, 600)
.position_centered()
.build()
.map_err(|e| e.to_string())?;
let mut canvas = window.into_canvas().software().build().map_err(|e| e.to_string())?;
let surface = Surface::from_file(png)
.map_err(|err| format!("failed to load cursor image: {}", err))?;
let cursor = Cursor::from_surface(surface, 0, 0)
.map_err(|err| format!("failed to load cursor: {}", err))?;
cursor.set();
canvas.clear();
canvas.present();
canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
let mut events = sdl_context.event_pump()?;
'mainloop: loop {
for event in events.poll_iter() {
match event {
Event::Quit{..} |
Event::KeyDown {keycode: Option::Some(Keycode::Escape),..} =>
break 'mainloop,
Event::MouseButtonDown {x, y,..} => {
canvas.fill_rect(Rect::new(x, y, 1, 1))?;
canvas.present();
}
_ => |
}
}
}
Ok(())
}
fn main() -> Result<(), String> {
let args: Vec<_> = env::args().collect();
if args.len() < 2 {
println!("Usage: cargo run /path/to/image.(png|jpg)")
} else {
run(Path::new(&args[1]))?;
}
Ok(())
}
| {} | conditional_block |
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn | (&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id as i32)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
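    // Illustrative addition (not in the original file): `decode` rejects a buffer
    // whose length prefix disagrees with the number of bytes actually present.
    #[test]
    fn incomplete_frame() {
        // The prefix claims a 4-byte payload, but only two payload bytes follow.
        let buf = [0u8, 0, 0, 4, 1, 2];
        assert_eq!(decode(&buf), Err(ParseError::Incomplete));
    }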
}
| len | identifier_name |
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id as i32)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else { | }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
} | Err(ParseError::Incomplete) | random_line_split |
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize {
self.buffer.len()
}
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id as i32)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes | else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
}
| {
Ok(())
} | conditional_block |
thrift.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
#![allow(dead_code)]
use crate::codec::ParseError;
pub const STOP: u8 = 0;
pub const VOID: u8 = 1;
pub const BOOL: u8 = 2;
pub const BYTE: u8 = 3;
pub const DOUBLE: u8 = 4;
pub const I16: u8 = 6;
pub const I32: u8 = 8;
pub const I64: u8 = 10;
pub const STRING: u8 = 11;
pub const STRUCT: u8 = 12;
pub const MAP: u8 = 13;
pub const SET: u8 = 14;
pub const LIST: u8 = 15;
#[derive(Clone)]
pub struct ThriftBuffer {
buffer: Vec<u8>,
}
impl Default for ThriftBuffer {
fn default() -> Self {
let mut buffer = Vec::<u8>::new();
buffer.resize(4, 0);
Self { buffer }
}
}
impl ThriftBuffer {
pub fn new() -> Self {
Self::default()
}
pub fn len(&self) -> usize |
/// add protocol version to buffer
pub fn protocol_header(&mut self) -> &Self {
self.buffer.extend_from_slice(&[128, 1, 0, 1]);
self
}
/// write the framed length to the buffer
#[inline]
pub fn frame(&mut self) -> &Self {
let bytes = self.buffer.len() - 4;
for (p, i) in (bytes as i32).to_be_bytes().iter().enumerate() {
self.buffer[p] = *i;
}
self
}
/// add method name to buffer
#[inline]
pub fn method_name(&mut self, method: &str) -> &Self {
self.write_str(method)
}
/// add sequence id to buffer
#[inline]
pub fn sequence_id(&mut self, id: i32) -> &Self {
self.write_i32(id as i32)
}
/// add stop sequence to buffer
pub fn stop(&mut self) -> &Self {
self.write_bytes(&[STOP])
}
// write an i16 to the buffer
#[inline]
pub fn write_i16(&mut self, value: i16) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i32 to the buffer
#[inline]
pub fn write_i32(&mut self, value: i32) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write an i64 to the buffer
#[inline]
pub fn write_i64(&mut self, value: i64) -> &Self {
self.buffer.extend_from_slice(&value.to_be_bytes());
self
}
// write a literal byte sequence to the buffer
#[inline]
pub fn write_bytes(&mut self, bytes: &[u8]) -> &Self {
self.buffer.extend_from_slice(bytes);
self
}
// write bool to the buffer
#[inline]
pub fn write_bool(&mut self, b: bool) -> &Self {
self.buffer.extend_from_slice(&[(b as u8)]);
self
}
#[inline]
pub fn write_str(&mut self, string: &str) -> &Self {
let string = string.as_bytes();
self.write_i32(string.len() as i32);
self.buffer.extend_from_slice(string);
self
}
pub fn as_bytes(&self) -> &[u8] {
&self.buffer
}
}
fn decode(buf: &[u8]) -> Result<(), ParseError> {
let bytes = buf.len() as u32;
if bytes > 4 {
let length = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]);
match length.checked_add(4_u32) {
Some(b) => {
if b == bytes {
Ok(())
} else {
Err(ParseError::Incomplete)
}
}
None => Err(ParseError::Unknown),
}
} else {
Err(ParseError::Incomplete)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ping() {
let mut buffer = ThriftBuffer::new();
// new buffer has 4 bytes to hold framing later
assert_eq!(buffer.len(), 4);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0]);
buffer.protocol_header();
assert_eq!(buffer.len(), 8);
assert_eq!(buffer.as_bytes(), &[0, 0, 0, 0, 128, 1, 0, 1]);
buffer.method_name("ping");
assert_eq!(buffer.len(), 16);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103]
);
buffer.sequence_id(0);
assert_eq!(buffer.len(), 20);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0]
);
buffer.stop();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 0, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
buffer.frame();
assert_eq!(buffer.len(), 21);
assert_eq!(
buffer.as_bytes(),
&[0, 0, 0, 17, 128, 1, 0, 1, 0, 0, 0, 4, 112, 105, 110, 103, 0, 0, 0, 0, 0]
);
assert_eq!(decode(buffer.as_bytes()), Ok(()));
}
}
| {
self.buffer.len()
} | identifier_body |
size_of.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use selectors;
use servo_arc::Arc;
use style;
use style::applicable_declarations::ApplicableDeclarationBlock;
use style::data::{ElementData, ElementStyles};
use style::gecko::selector_parser::{self, SelectorImpl};
use style::properties::ComputedValues;
use style::rule_tree::{RuleNode, StrongRuleNode};
use style::values::computed;
use style::values::specified;
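// `size_of_test!` is imported from elsewhere in the Servo tree and is not defined
// in this file. As a rough illustration of what each invocation below expands to,
// a minimal stand-in could look like the following (named differently here so it
// does not shadow the real macro; the actual implementation may differ):
macro_rules! size_of_test_sketch {
    ($name:ident, $t:ty, $expected:expr) => {
        #[test]
        fn $name() {
            let actual = ::std::mem::size_of::<$t>();
            assert_eq!(
                actual, $expected,
                "size_of::<{}>() is {} but {} was expected",
                stringify!($t), actual, $expected
            );
        }
    };
}
size_of_test_sketch!(sketch_size_of_option_u32, Option<u32>, 8);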
size_of_test!(size_of_selector, selectors::parser::Selector<SelectorImpl>, 8);
size_of_test!(size_of_pseudo_element, selector_parser::PseudoElement, 24);
size_of_test!(size_of_component, selectors::parser::Component<SelectorImpl>, 32);
size_of_test!(size_of_pseudo_class, selector_parser::NonTSPseudoClass, 24);
// The size of this is critical to performance on the bloom-basic microbenchmark.
// When iterating over a large Rule array, we want to be able to fast-reject
// selectors (with the inline hashes) with as few cache misses as possible.
size_of_test!(test_size_of_rule, style::stylist::Rule, 32);
// Large pages generate tens of thousands of ComputedValues.
size_of_test!(test_size_of_cv, ComputedValues, 248);
size_of_test!(test_size_of_option_arc_cv, Option<Arc<ComputedValues>>, 8);
size_of_test!(test_size_of_option_rule_node, Option<StrongRuleNode>, 8);
size_of_test!(test_size_of_element_styles, ElementStyles, 16);
size_of_test!(test_size_of_element_data, ElementData, 24);
size_of_test!(test_size_of_property_declaration, style::properties::PropertyDeclaration, 32);
size_of_test!(test_size_of_application_declaration_block, ApplicableDeclarationBlock, 24);
// FIXME(bholley): This can shrink with a little bit of work.
// See https://github.com/servo/servo/issues/17280
size_of_test!(test_size_of_rule_node, RuleNode, 80);
// This is huge, but we allocate it on the stack and then never move it,
// we only pass `&mut SourcePropertyDeclaration` references around.
size_of_test!(test_size_of_parsed_declaration, style::properties::SourcePropertyDeclaration, 576);
size_of_test!(test_size_of_computed_image, computed::image::Image, 40);
size_of_test!(test_size_of_specified_image, specified::image::Image, 40);
// FIXME(bz): These can shrink if we move the None_ value inside the
// enum instead of paying an extra word for the Either discriminant.
size_of_test!(test_size_of_computed_image_layer, computed::image::ImageLayer, | if cfg!(rustc_has_pr45225) { 40 } else { 48 });
size_of_test!(test_size_of_specified_image_layer, specified::image::ImageLayer,
if cfg!(rustc_has_pr45225) { 40 } else { 48 }); | random_line_split |
|
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn main() {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line"); | let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
} |
// ANCHOR: ch19 | random_line_split |
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn main() | let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
// ANCHOR: ch19 | identifier_body |
main.rs | use rand::Rng;
use std::cmp::Ordering;
use std::io;
fn | () {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1..101);
println!("The secret number is: {}", secret_number);
loop {
println!("Please input your guess.");
let mut guess = String::new();
// ANCHOR: here
// --snip--
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
// ANCHOR: ch19
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
// ANCHOR_END: ch19
println!("You guessed: {}", guess);
// --snip--
// ANCHOR_END: here
match guess.cmp(&secret_number) {
Ordering::Less => println!("Too small!"),
Ordering::Greater => println!("Too big!"),
Ordering::Equal => {
println!("You win!");
break;
}
}
}
}
| main | identifier_name |
fuzzing.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {
let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));
let (write_socket, mut read_socket) = MemorySocket::new_pair();
let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);
// Write the `NetworkMessage`s to a fake socket
let f_send = async move {
for network_msg in &network_msgs {
writer.send(network_msg).await.unwrap();
}
};
// Read the serialized `NetworkMessage`s from the fake socket
let f_recv = async move {
let mut buf = Vec::new();
read_socket.read_to_end(&mut buf).await.unwrap();
buf
};
let (_, buf) = block_on(future::join(f_send, f_recv));
buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shutdown the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
connection_notifs_rx.collect::<Vec<_>>().await;
| // for all network notifs to drain out and finish.
drop(peer_reqs_tx);
peer_notifs_rx.collect::<Vec<_>>().await;
});
}
#[test]
fn test_peer_fuzzers() {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
} | // ACK the "remote" d/c and drop our handle to the Peer actor. Then wait | random_line_split |
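// Illustrative sketch of how `fuzz` above could be driven from a fuzzing entry
// point. The cargo-fuzz/libFuzzer wiring and the `network::fuzzing` module path
// are assumptions for illustration, not the actual Diem fuzz-target layout.
//
// fuzz_targets/peer_inbound.rs
#![no_main]
use libfuzzer_sys::fuzz_target;

fuzz_target!(|data: &[u8]| {
    // Exercise the Peer actor's inbound message handling with raw fuzzer bytes.
    network::fuzzing::fuzz(data);
});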
fuzzing.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> | buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shutdown the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
connection_notifs_rx.collect::<Vec<_>>().await;
// ACK the "remote" d/c and drop our handle to the Peer actor. Then wait
// for all network notifs to drain out and finish.
drop(peer_reqs_tx);
peer_notifs_rx.collect::<Vec<_>>().await;
});
}
#[test]
fn test_peer_fuzzers() {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
}
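// --- Editorial sketch (not part of the original file) ---
// A minimal illustration of how `generate_corpus` and `fuzz` above could be
// driven from a libFuzzer-style fuzz-target crate. The `libfuzzer_sys`
// dependency and the module path below are assumptions made for this sketch;
// Diem's real fuzz tooling registers these functions through its own harness.
use libfuzzer_sys::fuzz_target;
use diem_proptest_helpers::ValueGenerator;
use network::peer::fuzzing::{fuzz, generate_corpus}; // assumed module path

fuzz_target!(|data: &[u8]| {
    // Raw fuzzer bytes go straight to the Peer inbound-message harness.
    fuzz(data);
});

// Deterministic seed input for the fuzzer's corpus directory.
fn _seed_corpus() -> Vec<u8> {
    generate_corpus(&mut ValueGenerator::deterministic())
}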
fuzzing.rs
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants,
peer::Peer,
protocols::wire::{
handshake::v1::{MessagingProtocolVersion, SupportedProtocols},
messaging::v1::{NetworkMessage, NetworkMessageSink},
},
testutils::fake_socket::ReadOnlyTestSocketVec,
transport::{Connection, ConnectionId, ConnectionMetadata},
ProtocolId,
};
use channel::{diem_channel, message_queues::QueueStyle};
use diem_config::{config::PeerRole, network_id::NetworkContext};
use diem_proptest_helpers::ValueGenerator;
use diem_time_service::TimeService;
use diem_types::{network_address::NetworkAddress, PeerId};
use futures::{executor::block_on, future, io::AsyncReadExt, sink::SinkExt, stream::StreamExt};
use memsocket::MemorySocket;
use netcore::transport::ConnectionOrigin;
use proptest::{arbitrary::any, collection::vec};
use std::time::Duration;
/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them
/// out to a buffer using our length-prefixed message codec.
pub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {
let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));
let (write_socket, mut read_socket) = MemorySocket::new_pair();
let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);
// Write the `NetworkMessage`s to a fake socket
let f_send = async move {
for network_msg in &network_msgs {
writer.send(network_msg).await.unwrap();
}
};
// Read the serialized `NetworkMessage`s from the fake socket
let f_recv = async move {
let mut buf = Vec::new();
read_socket.read_to_end(&mut buf).await.unwrap();
buf
};
let (_, buf) = block_on(future::join(f_send, f_recv));
buf
}
/// Fuzz the `Peer` actor's inbound message handling.
///
/// For each fuzzer iteration, we spin up a new `Peer` actor and pipe the raw
/// fuzzer data into it. This mostly tests that the `Peer` inbound message handling
/// doesn't panic or leak memory when reading, deserializing, and handling messages
/// from remote peers.
pub fn fuzz(data: &[u8]) {
// Use the basic single-threaded runtime, since our current tokio version has
// a chance to leak memory and/or thread handles when using the threaded
// runtime and sometimes blocks when trying to shutdown the runtime.
//
// https://github.com/tokio-rs/tokio/pull/2649
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
let executor = rt.handle().clone();
// We want to choose a constant peer id for _our_ peer id, since we will
// generate unbounded metrics otherwise and OOM during fuzzing.
let peer_id = PeerId::ZERO;
// However, we want to choose a random _remote_ peer id to ensure we _don't_
// have metrics logging the remote peer id (which would eventually OOM in
// production for public-facing nodes).
let remote_peer_id = PeerId::random();
// Mock data
let network_context = NetworkContext::mock_with_peer_id(peer_id);
let socket = ReadOnlyTestSocketVec::new(data.to_vec());
let metadata = ConnectionMetadata::new(
remote_peer_id,
ConnectionId::from(123),
NetworkAddress::mock(),
ConnectionOrigin::Inbound,
MessagingProtocolVersion::V1,
SupportedProtocols::from(
[
ProtocolId::ConsensusRpc,
ProtocolId::ConsensusDirectSend,
ProtocolId::MempoolDirectSend,
ProtocolId::StateSyncDirectSend,
ProtocolId::DiscoveryDirectSend,
ProtocolId::HealthCheckerRpc,
]
.iter(),
),
PeerRole::Unknown,
);
let connection = Connection { socket, metadata };
let (connection_notifs_tx, connection_notifs_rx) = channel::new_test(8);
let channel_size = 8;
let (peer_reqs_tx, peer_reqs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
let (peer_notifs_tx, peer_notifs_rx) = diem_channel::new(QueueStyle::FIFO, channel_size, None);
// Spin up a new `Peer` actor
let peer = Peer::new(
network_context,
executor.clone(),
TimeService::mock(),
connection,
connection_notifs_tx,
peer_reqs_rx,
peer_notifs_tx,
Duration::from_millis(constants::INBOUND_RPC_TIMEOUT_MS),
constants::MAX_CONCURRENT_INBOUND_RPCS,
constants::MAX_CONCURRENT_OUTBOUND_RPCS,
constants::MAX_FRAME_SIZE,
None,
None,
);
executor.spawn(peer.start());
rt.block_on(async move {
// Wait for "remote" to disconnect (we read all data and socket read
// returns EOF), we read a disconnect request, or we fail to deserialize
// something.
connection_notifs_rx.collect::<Vec<_>>().await;
// ACK the "remote" d/c and drop our handle to the Peer actor. Then wait
// for all network notifs to drain out and finish.
drop(peer_reqs_tx);
peer_notifs_rx.collect::<Vec<_>>().await;
});
}
#[test]
fn test_peer_fuzzers() {
let mut value_gen = ValueGenerator::deterministic();
for _ in 0..50 {
let corpus = generate_corpus(&mut value_gen);
fuzz(&corpus);
}
}
owned_slice.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
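// --- Editorial sketch (not part of the original file) ---
// A small usage example for `OwnedSlice<T>` as defined above, relying only on
// the conversions the file provides (`From<Vec<T>>`, `Deref`, `from_slice`,
// `into_vec`). The function name is hypothetical.
fn owned_slice_round_trip() {
    // Build an owned slice from a Vec; ownership of the allocation moves in.
    let owned: OwnedSlice<u32> = vec![1, 2, 3].into();
    // Deref gives ordinary slice access.
    assert_eq!(owned.len(), 3);
    assert_eq!(&*owned, &[1, 2, 3][..]);
    // Clone via `from_slice`, then recover a Vec (freed through Vec's drop).
    let copy = OwnedSlice::from_slice(&*owned);
    assert_eq!(copy.into_vec(), vec![1, 2, 3]);
}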
owned_slice.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
owned_slice.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
owned_slice.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A replacement for `Box<[T]>` that cbindgen can understand.
use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::{fmt, iter, mem, slice};
use to_shmem::{SharedMemoryBuilder, ToShmem};
/// A struct that basically replaces a `Box<[T]>`, but which cbindgen can
/// understand.
///
/// We could rely on the struct layout of `Box<[T]>` per:
///
/// https://github.com/rust-lang/unsafe-code-guidelines/blob/master/reference/src/layout/pointers.md
///
/// But handling fat pointers with cbindgen both in structs and argument
/// positions more generally is a bit tricky.
///
/// cbindgen:derive-eq=false
/// cbindgen:derive-neq=false
#[repr(C)]
pub struct OwnedSlice<T: Sized> {
ptr: NonNull<T>,
len: usize,
_phantom: PhantomData<T>,
}
impl<T: Sized> Default for OwnedSlice<T> {
#[inline]
fn default() -> Self {
Self {
len: 0,
ptr: NonNull::dangling(),
_phantom: PhantomData,
}
}
}
impl<T: Sized> Drop for OwnedSlice<T> {
#[inline]
fn drop(&mut self) {
if self.len != 0 {
let _ = mem::replace(self, Self::default()).into_vec();
}
}
}
unsafe impl<T: Sized + Send> Send for OwnedSlice<T> {}
unsafe impl<T: Sized + Sync> Sync for OwnedSlice<T> {}
impl<T: Clone> Clone for OwnedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self::from_slice(&**self)
}
}
impl<T: fmt::Debug> fmt::Debug for OwnedSlice<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(formatter)
}
}
impl<T: PartialEq> PartialEq for OwnedSlice<T> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(other.deref())
}
}
impl<T: Eq> Eq for OwnedSlice<T> {}
impl<T: Sized> OwnedSlice<T> {
/// Convert the OwnedSlice into a boxed slice.
#[inline]
pub fn into_box(self) -> Box<[T]> {
self.into_vec().into_boxed_slice()
}
/// Convert the OwnedSlice into a Vec.
#[inline]
pub fn into_vec(self) -> Vec<T> {
let ret = unsafe { Vec::from_raw_parts(self.ptr.as_ptr(), self.len, self.len) };
mem::forget(self);
ret
}
/// Iterate over all the elements in the slice taking ownership of them.
#[inline]
pub fn into_iter(self) -> impl Iterator<Item = T> + ExactSizeIterator {
self.into_vec().into_iter()
}
/// Convert the regular slice into an owned slice.
#[inline]
pub fn from_slice(s: &[T]) -> Self
where
T: Clone,
{
Self::from(s.to_vec())
}
}
impl<T> Deref for OwnedSlice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
}
impl<T> DerefMut for OwnedSlice<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
}
impl<T> From<Box<[T]>> for OwnedSlice<T> {
#[inline]
fn from(mut b: Box<[T]>) -> Self {
let len = b.len();
let ptr = unsafe { NonNull::new_unchecked(b.as_mut_ptr()) };
mem::forget(b);
Self {
len,
ptr,
_phantom: PhantomData,
}
}
}
impl<T> From<Vec<T>> for OwnedSlice<T> {
#[inline]
fn from(b: Vec<T>) -> Self {
Self::from(b.into_boxed_slice())
}
}
impl<T: Sized> MallocShallowSizeOf for OwnedSlice<T> {
fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { ops.malloc_size_of(self.ptr.as_ptr()) }
}
}
impl<T: MallocSizeOf + Sized> MallocSizeOf for OwnedSlice<T> {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.shallow_size_of(ops) + (**self).size_of(ops)
}
}
impl<T: ToShmem + Sized> ToShmem for OwnedSlice<T> {
fn to_shmem(&self, builder: &mut SharedMemoryBuilder) -> mem::ManuallyDrop<Self> {
unsafe {
let dest = to_shmem::to_shmem_slice(self.iter(), builder);
mem::ManuallyDrop::new(Self::from(Box::from_raw(dest)))
}
}
}
impl<T> iter::FromIterator<T> for OwnedSlice<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
Vec::from_iter(iter).into()
}
}
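// --- Editorial sketch (not part of the original file) ---
// Why the `if self.len != 0` guard in `Drop` above matters: a default
// `OwnedSlice` has len == 0 and a dangling `NonNull` pointer, so it must be
// dropped without rebuilding a Vec, while a non-empty one hands its
// allocation back to Vec to be freed. The function name is hypothetical.
fn drop_behaviour_sketch() {
    let empty: OwnedSlice<String> = OwnedSlice::default();
    assert!(empty.is_empty());
    drop(empty); // no deallocation happens; the pointer was dangling

    let full: OwnedSlice<String> = vec!["a".to_owned(), "b".to_owned()].into();
    drop(full); // len != 0, so Drop rebuilds the Vec and frees both strings
}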
lib.rs
#[macro_use]
extern crate prodbg_api;
use prodbg_api::*;
struct Line {
opcode: String,
regs_write: String,
regs_read: String,
address: u64,
}
///
/// Breakpoint
///
struct Breakpoint {
address: u64,
}
///
/// Holds colors use for the disassembly view. This should be possible to configure later on.
///
/*
struct Colors {
breakpoint: Color,
step_cursor: Color,
cursor: Color,
address: Color,
_bytes: Color,
}
*/
struct DisassemblyView {
exception_location: u64,
has_made_step: bool,
cursor: u64,
breakpoint_radius: f32,
breakpoint_spacing: f32,
address_size: u8,
reset_to_center: bool,
lines: Vec<Line>,
breakpoints: Vec<Breakpoint>,
}
impl DisassemblyView {
fn set_disassembly(&mut self, reader: &mut Reader) {
self.lines.clear();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requsted {}", visible_lines * 10);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, false);
}
//println!("draw arrow {} {}", line.address, self.exception_location);
if line.address == self.exception_location {
Self::render_arrow(ui, cx, cy + 2.0, self.breakpoint_radius * 2.0);
}
}
self.has_made_step = false;
}
}
impl View for DisassemblyView {
fn new(_: &Ui, _: &Service) -> Self {
DisassemblyView {
exception_location: u64::max_value(),
cursor: 0xe003, //u64::max_value(),
breakpoint_radius: 10.0,
breakpoint_spacing: 6.0,
address_size: 4,
has_made_step: false,
lines: Vec::new(),
breakpoints: Vec::new(),
reset_to_center: false,
}
}
fn update(&mut self, ui: &mut Ui, reader: &mut Reader, writer: &mut Writer) {
for event in reader.get_events() {
match event {
EVENT_SET_EXCEPTION_LOCATION => {
let location = reader.find_u64("address").ok().unwrap();
reader.find_u8("address_size").ok().map(|address_size| {
self.address_size = address_size;
});
if self.exception_location != location {
self.request_disassembly(ui, location, writer);
self.has_made_step = true;
self.exception_location = location;
self.cursor = location;
}
}
EVENT_SET_DISASSEMBLY => {
self.set_disassembly(reader);
}
_ => (),
}
}
if ui.is_key_down(Key::F9) {
self.toggle_breakpoint(writer);
}
if ui.is_key_down(Key::Down) {
self.scroll_cursor(1);
}
if ui.is_key_down(Key::Up) {
self.scroll_cursor(-1);
}
if ui.is_key_down(Key::PageDown) {
self.scroll_cursor(8);
}
if ui.is_key_down(Key::PageUp) {
self.scroll_cursor(-8);
}
self.render_ui(ui);
}
}
#[no_mangle]
pub fn init_plugin(plugin_handler: &mut PluginHandler) {
define_view_plugin!(PLUGIN, b"Disassembly2 View", DisassemblyView);
plugin_handler.register_view(&PLUGIN);
}
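// --- Editorial sketch (not part of the original file) ---
// The visible-line computation in `get_visible_lines_count` above subtracts
// 1.0 for the title row and adds 0.5 before truncating so the result rounds
// to the nearest line count. The same arithmetic with made-up window and
// font-height numbers, as a standalone check:
fn visible_lines(window_height: f32, line_height: f32) -> usize {
    let visible = (window_height / line_height) - 1.0;
    (visible + 0.5) as usize
}

fn visible_lines_example() {
    // 600px window with 18px lines: 600/18 - 1 = 32.33 -> rounds to 32 rows.
    assert_eq!(visible_lines(600.0, 18.0), 32);
    // 400px window with 16px lines: 400/16 - 1 = 24.0 -> 24 rows.
    assert_eq!(visible_lines(400.0, 16.0), 24);
}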
lib.rs
#[macro_use]
extern crate prodbg_api;
use prodbg_api::*;
struct Line {
opcode: String,
regs_write: String,
regs_read: String,
address: u64,
}
///
/// Breakpoint
///
struct Breakpoint {
address: u64,
}
///
/// Holds colors use for the disassembly view. This should be possible to configure later on.
///
/*
struct Colors {
breakpoint: Color,
step_cursor: Color,
cursor: Color,
address: Color,
_bytes: Color,
}
*/
struct DisassemblyView {
exception_location: u64,
has_made_step: bool,
cursor: u64,
breakpoint_radius: f32,
breakpoint_spacing: f32,
address_size: u8,
reset_to_center: bool,
lines: Vec<Line>,
breakpoints: Vec<Breakpoint>,
}
impl DisassemblyView {
fn set_disassembly(&mut self, reader: &mut Reader) {
self.lines.clear();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requsted {}", visible_lines * 10);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, false);
}
//println!("draw arrow {} {}", line.address, self.exception_location);
if line.address == self.exception_location {
Self::render_arrow(ui, cx, cy + 2.0, self.breakpoint_radius * 2.0);
}
}
self.has_made_step = false;
}
}
impl View for DisassemblyView {
fn new(_: &Ui, _: &Service) -> Self {
DisassemblyView {
exception_location: u64::max_value(),
cursor: 0xe003, //u64::max_value(),
breakpoint_radius: 10.0,
breakpoint_spacing: 6.0,
address_size: 4,
has_made_step: false,
lines: Vec::new(),
breakpoints: Vec::new(),
reset_to_center: false,
}
}
fn update(&mut self, ui: &mut Ui, reader: &mut Reader, writer: &mut Writer) {
for event in reader.get_events() {
match event {
EVENT_SET_EXCEPTION_LOCATION => {
let location = reader.find_u64("address").ok().unwrap();
reader.find_u8("address_size").ok().map(|address_size| {
self.address_size = address_size;
});
if self.exception_location != location {
self.request_disassembly(ui, location, writer);
self.has_made_step = true;
self.exception_location = location;
self.cursor = location;
}
}
EVENT_SET_DISASSEMBLY => {
self.set_disassembly(reader);
}
_ => (),
}
}
if ui.is_key_down(Key::F9) {
self.toggle_breakpoint(writer);
}
if ui.is_key_down(Key::Down) {
self.scroll_cursor(1);
}
if ui.is_key_down(Key::Up) {
self.scroll_cursor(-1);
}
if ui.is_key_down(Key::PageDown) {
self.scroll_cursor(8);
}
if ui.is_key_down(Key::PageUp) {
self.scroll_cursor(-8);
}
self.render_ui(ui);
}
}
#[no_mangle]
pub fn init_plugin(plugin_handler: &mut PluginHandler) {
define_view_plugin!(PLUGIN, b"Disassembly2 View", DisassemblyView);
plugin_handler.register_view(&PLUGIN);
}
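// --- Editorial sketch (not part of the original file) ---
// The cursor movement in `scroll_cursor` above clamps the new position to the
// lines that are currently loaded and otherwise leaves the cursor unchanged
// (more disassembly would have to be fetched first). The same bounds check in
// isolation, over plain addresses; names here are hypothetical.
fn step_cursor(lines: &[u64], cursor: u64, steps: i32) -> u64 {
    if let Some(i) = lines.iter().position(|&a| a == cursor) {
        let pos = i as i32 + steps;
        if pos >= 0 && (pos as usize) < lines.len() {
            return lines[pos as usize];
        }
    }
    cursor // out of range or cursor not found: keep the old position
}

fn step_cursor_example() {
    let lines = [0xe000, 0xe002, 0xe004, 0xe006];
    assert_eq!(step_cursor(&lines, 0xe002, 1), 0xe004);
    assert_eq!(step_cursor(&lines, 0xe000, -1), 0xe000); // would underflow: unchanged
}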
lib.rs
#[macro_use]
extern crate prodbg_api;
| opcode: String,
regs_write: String,
regs_read: String,
address: u64,
}
///
/// Breakpoint
///
struct Breakpoint {
address: u64,
}
///
/// Holds colors use for the disassembly view. This should be possible to configure later on.
///
/*
struct Colors {
breakpoint: Color,
step_cursor: Color,
cursor: Color,
address: Color,
_bytes: Color,
}
*/
struct DisassemblyView {
exception_location: u64,
has_made_step: bool,
cursor: u64,
breakpoint_radius: f32,
breakpoint_spacing: f32,
address_size: u8,
reset_to_center: bool,
lines: Vec<Line>,
breakpoints: Vec<Breakpoint>,
}
impl DisassemblyView {
fn set_disassembly(&mut self, reader: &mut Reader) {
self.lines.clear();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requsted {}", visible_lines * 10);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, false);
}
//println!("draw arrow {} {}", line.address, self.exception_location);
if line.address == self.exception_location {
Self::render_arrow(ui, cx, cy + 2.0, self.breakpoint_radius * 2.0);
}
}
self.has_made_step = false;
}
}
impl View for DisassemblyView {
fn new(_: &Ui, _: &Service) -> Self {
DisassemblyView {
exception_location: u64::max_value(),
cursor: 0xe003, //u64::max_value(),
breakpoint_radius: 10.0,
breakpoint_spacing: 6.0,
address_size: 4,
has_made_step: false,
lines: Vec::new(),
breakpoints: Vec::new(),
reset_to_center: false,
}
}
fn update(&mut self, ui: &mut Ui, reader: &mut Reader, writer: &mut Writer) {
for event in reader.get_events() {
match event {
EVENT_SET_EXCEPTION_LOCATION => {
let location = reader.find_u64("address").ok().unwrap();
reader.find_u8("address_size").ok().map(|address_size| {
self.address_size = address_size;
});
if self.exception_location != location {
self.request_disassembly(ui, location, writer);
self.has_made_step = true;
self.exception_location = location;
self.cursor = location;
}
}
EVENT_SET_DISASSEMBLY => {
self.set_disassembly(reader);
}
_ => (),
}
}
if ui.is_key_down(Key::F9) {
self.toggle_breakpoint(writer);
}
if ui.is_key_down(Key::Down) {
self.scroll_cursor(1);
}
if ui.is_key_down(Key::Up) {
self.scroll_cursor(-1);
}
if ui.is_key_down(Key::PageDown) {
self.scroll_cursor(8);
}
if ui.is_key_down(Key::PageUp) {
self.scroll_cursor(-8);
}
self.render_ui(ui);
}
}
#[no_mangle]
pub fn init_plugin(plugin_handler: &mut PluginHandler) {
define_view_plugin!(PLUGIN, b"Disassembly2 View", DisassemblyView);
plugin_handler.register_view(&PLUGIN);
} | use prodbg_api::*;
struct Line { | random_line_split |
lib.rs
#[macro_use]
extern crate prodbg_api;
use prodbg_api::*;
struct Line {
opcode: String,
regs_write: String,
regs_read: String,
address: u64,
}
///
/// Breakpoint
///
struct Breakpoint {
address: u64,
}
///
/// Holds colors use for the disassembly view. This should be possible to configure later on.
///
/*
struct Colors {
breakpoint: Color,
step_cursor: Color,
cursor: Color,
address: Color,
_bytes: Color,
}
*/
struct DisassemblyView {
exception_location: u64,
has_made_step: bool,
cursor: u64,
breakpoint_radius: f32,
breakpoint_spacing: f32,
address_size: u8,
reset_to_center: bool,
lines: Vec<Line>,
breakpoints: Vec<Breakpoint>,
}
impl DisassemblyView {
fn set_disassembly(&mut self, reader: &mut Reader) {
self.lines.clear();
for entry in reader.find_array("disassembly") {
let address = entry.find_u64("address").ok().unwrap();
let line = entry.find_string("line").ok().unwrap();
let mut regs_read = String::new();
let mut regs_write = String::new();
entry.find_string("registers_read").map(|regs| {
regs_read = regs.to_owned();
}).ok();
entry.find_string("registers_write").map(|regs| {
regs_write = regs.to_owned();
}).ok();
self.lines.push(Line {
opcode: line.to_owned(),
regs_read: regs_read,
regs_write: regs_write,
address: address,
});
}
}
///
/// Calculate how many visible lines we have
///
fn get_visible_lines_count(ui: &Ui) -> usize {
let (_, height) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
// - 1.0 for title text. Would be better to get the cursor pos here instead
let visible_lines = (height / text_height) - 1.0;
// + 0.5 to round up
(visible_lines + 0.5) as usize
}
fn request_disassembly(&mut self, ui: &mut Ui, location: u64, writer: &mut Writer) {
let visible_lines = Self::get_visible_lines_count(ui) as u64;
// check if we have the the location within all lines, then we don't need to request more
for line in &self.lines {
if line.address == location {
return;
}
}
self.reset_to_center = true;
writer.event_begin(EVENT_GET_DISASSEMBLY as u16);
writer.write_u64("address_start", location - (visible_lines * 4));
writer.write_u32("instruction_count", (visible_lines * 4) as u32);
writer.event_end();
println!("requsted {}", visible_lines * 10);
}
fn color_text_reg_selection(ui: &Ui, regs_use: &Vec<&str>, line: &Line, text_height: f32) {
let (cx, cy) = ui.get_cursor_screen_pos();
let mut color_index = 0;
// TODO: Allocs memory, fix
let line_text = format!(" 0x{:x} {}", line.address, line.opcode);
let colors = [
0x00b27474,
0x00b28050,
0x00a9b250,
0x0060b250,
0x004fb292,
0x004f71b2,
0x008850b2,
0x00b25091,
];
//let mut color_index = 0;
//let font_size = 14.0;
// TODO: Offset here is hardcoded with given hex size, this should be fixed
//let start_offset = font_size * 11.0; // 0x00000000 "
for reg in regs_use {
let color = colors[color_index & 7];
line_text.find(reg).map(|offset| {
let (tx, _) = ui.calc_text_size(&line_text, offset);
ui.fill_rect(cx + tx, cy, 22.0, text_height, Color::from_au32(200, color));
});
color_index += 1;
}
ui.text(&line_text);
}
fn toggle_breakpoint(&mut self, writer: &mut Writer) {
let address = self.cursor;
for i in (0..self.breakpoints.len()).rev() {
if self.breakpoints[i].address == address {
writer.event_begin(EVENT_DELETE_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
self.breakpoints.swap_remove(i);
return;
}
}
writer.event_begin(EVENT_SET_BREAKPOINT as u16);
writer.write_u64("address", address);
writer.event_end();
println!("adding breakpoint");
// TODO: We shouldn't really add the breakpoint here but wait for reply
// from the backend that we actually managed to set the breakpoint.
// +bonus would be a "progress" icon here instead that it's being set.
self.breakpoints.push(Breakpoint { address: address } );
}
fn has_breakpoint(&self, address: u64) -> bool {
self.breakpoints.iter().find(|bp| bp.address == address).is_some()
}
/*
fn set_cursor_at(&mut self, pos: i32) {
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos > self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
}
*/
fn scroll_cursor(&mut self, steps: i32) {
for (i, line) in self.lines.iter().enumerate() {
if line.address == self.cursor {
let pos = (i as i32) + steps;
// if we don't have enough lines we need to fetch more
if pos < 0 {
return;
}
// need to fetch more data here also
if pos >= self.lines.len() as i32 {
return;
}
self.cursor = self.lines[pos as usize].address;
return;
}
}
}
fn render_arrow(ui: &Ui, pos_x: f32, pos_y: f32, scale: f32) {
let color = Color::from_argb(255, 0, 180, 180);
ui.fill_rect(pos_x + (0.0 * scale),
pos_y + (0.25 * scale),
0.5 * scale,
0.5 * scale,
color);
// Wasn't able to get this to work with just one convex poly fill
let arrow: [Vec2; 3] = [
Vec2::new((0.50 * scale) + pos_x, (0.00 * scale) + pos_y),
Vec2::new((1.00 * scale) + pos_x, (0.50 * scale) + pos_y),
Vec2::new((0.50 * scale) + pos_x, (1.00 * scale) + pos_y),
];
ui.fill_convex_poly(&arrow, color, true);
}
fn render_ui(&mut self, ui: &mut Ui) {
if self.lines.len() == 0 {
return;
}
let (size_x, size_h) = ui.get_window_size();
let text_height = ui.get_text_line_height_with_spacing();
let mut regs = String::new();
let mut regs_pc_use = Vec::new();
//let font_size = ui.get_font_size();
// find registers for pc
for line in &self.lines {
if line.address == self.exception_location {
if line.regs_read.len() > 1 || line.regs_write.len() > 1 {
if line.regs_read.len() > 0 {
regs.push_str(&line.regs_read);
}
if line.regs_write.len() > 0 {
regs.push(' ');
regs.push_str(&line.regs_write);
}
let t = regs.trim_left();
regs_pc_use = t.split(' ').collect();
break;
}
}
}
for line in &self.lines {
let (cx, cy) = ui.get_cursor_screen_pos();
let bp_radius = self.breakpoint_radius;
if line.address == self.cursor {
if (cy - text_height) < 0.0 {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy - text_height, 0.0);
}
}
if cy > (size_h - text_height) {
if self.reset_to_center || self.has_made_step {
ui.set_scroll_here(0.5);
self.reset_to_center = false;
} else {
ui.set_scroll_from_pos_y(cy + text_height, 1.0);
}
}
ui.fill_rect(cx, cy, size_x, text_height, Color::from_argb(200, 0, 0, 127));
}
if regs_pc_use.len() > 0 {
Self::color_text_reg_selection(ui, ®s_pc_use, &line, text_height);
} else {
ui.text_fmt(format_args!(" 0x{:x} {}", line.address, line.opcode));
}
if self.has_breakpoint(line.address) {
ui.fill_circle(&Vec2{ x: cx + self.breakpoint_spacing + bp_radius, y: cy + bp_radius + 2.0},
bp_radius, Color::from_argb(255,0,0,140), 12, false);
}
//println!("draw arrow {} {}", line.address, self.exception_location);
if line.address == self.exception_location {
Self::render_arrow(ui, cx, cy + 2.0, self.breakpoint_radius * 2.0);
}
}
self.has_made_step = false;
}
}
impl View for DisassemblyView {
fn new(_: &Ui, _: &Service) -> Self {
DisassemblyView {
exception_location: u64::max_value(),
cursor: 0xe003, //u64::max_value(),
breakpoint_radius: 10.0,
breakpoint_spacing: 6.0,
address_size: 4,
has_made_step: false,
lines: Vec::new(),
breakpoints: Vec::new(),
reset_to_center: false,
}
}
fn update(&mut self, ui: &mut Ui, reader: &mut Reader, writer: &mut Writer) {
for event in reader.get_events() {
match event {
EVENT_SET_EXCEPTION_LOCATION => {
let location = reader.find_u64("address").ok().unwrap();
reader.find_u8("address_size").ok().map(|address_size| {
self.address_size = address_size;
});
if self.exception_location != location {
self.request_disassembly(ui, location, writer);
self.has_made_step = true;
self.exception_location = location;
self.cursor = location;
}
}
EVENT_SET_DISASSEMBLY => {
self.set_disassembly(reader);
}
_ => (),
}
}
if ui.is_key_down(Key::F9) {
self.toggle_breakpoint(writer);
}
if ui.is_key_down(Key::Down) {
self.scroll_cursor(1);
}
if ui.is_key_down(Key::Up) {
self.scroll_cursor(-1);
}
if ui.is_key_down(Key::PageDown) {
self.scroll_cursor(8);
}
if ui.is_key_down(Key::PageUp) {
self.scroll_cursor(-8);
}
self.render_ui(ui);
}
}
#[no_mangle]
pub fn init_plugin(plugin_handler: &mut PluginHandler) {
define_view_plugin!(PLUGIN, b"Disassembly2 View", DisassemblyView);
plugin_handler.register_view(&PLUGIN);
}
|
account.rs
extern crate meg;
extern crate log;
use log::*;
use std::env;
use std::clone::Clone;
use turbo::util::{CliResult, Config};
use self::meg::ops::meg_account_create as Act;
use self::meg::ops::meg_account_show as Show;
#[derive(RustcDecodable, Clone)]
pub struct Options {
pub flag_create: String,
pub flag_show: bool,
pub flag_verbose: bool,
}
pub const USAGE: &'static str = "
Usage:
meg account [options]
Options:
-h, --help Print this message
--create EMAIL Provide an email to create a new account
--show View your account details
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
debug!("executing; cmd=meg-account; args={:?}", env::args().collect::<Vec<_>>());
config.shell().set_verbose(options.flag_verbose);
let vec = env::args().collect::<Vec<_>>();
for x in vec.iter() {
if x == "--create" {
let mut acct: Act::Createoptions = Act::CreateAcc::new();
acct.email = options.flag_create.clone();
acct.create();
} else if x == "--show" {
let mut acct: Show::Showoptions = Show::ShowAcc::new(); //Not reqd - to expand later if
acct.email = options.flag_create.clone();    //multiple accounts need to be shown
acct.show();
|
} | }
}
return Ok(None) | random_line_split |
account.rs | extern crate meg;
extern crate log;
use log::*;
use std::env;
use std::clone::Clone;
use turbo::util::{CliResult, Config};
use self::meg::ops::meg_account_create as Act;
use self::meg::ops::meg_account_show as Show;
#[derive(RustcDecodable, Clone)]
pub struct | {
pub flag_create: String,
pub flag_show: bool,
pub flag_verbose: bool,
}
pub const USAGE: &'static str = "
Usage:
meg account [options]
Options:
-h, --help Print this message
--create EMAIL Provide an email to create a new account
--show View your account details
-v, --verbose Use verbose output
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
debug!("executing; cmd=meg-account; args={:?}", env::args().collect::<Vec<_>>());
config.shell().set_verbose(options.flag_verbose);
let vec = env::args().collect::<Vec<_>>();
for x in vec.iter() {
if x == "--create" {
let mut acct: Act::Createoptions = Act::CreateAcc::new();
acct.email = options.flag_create.clone();
acct.create();
} else if x == "--show" {
let mut acct: Show::Showoptions = Show::ShowAcc::new(); //Not reqd - to expand later if
acct.email = options.flag_create.clone(); //multiple accounts need to be shown
acct.show();
}
}
return Ok(None)
}
| Options | identifier_name |
lib.rs | #![feature(core)]
/// Generate a new iterable with a list comprehension. This macro tries to follow the syntax of | /// Python's list comprehensions. This is a very flexible macro that allows the generation of any
/// iterable that implements `std::iter::FromIterator`. The resulting type will be determined by
/// the type of the variable that you are attempting to assign to. You can create a `Vec`:
///
/// ```ignore
/// let x: Vec<i32> = gen![i*30 => i in [1, 2, 3, 4, 5]];
/// ```
///
/// You can generate a `HashSet`:
///
/// ```ignore
/// let x: HashSet<i32> = gen![i*30 => i in [1, 2, 3, 4, 5]];
/// ```
///
/// You can even use conditionals to generate stuff:
///
/// ```ignore
/// let x: Vec<i32> = gen![i => i in [1, 2, 3, 4, 5], i % 2 == 0];
/// assert_eq!(x, vec![2, 4]);
/// ```
///
/// Comparison to Python's list comprehension
/// ===
///
/// Python
/// ---
/// ```python
/// x = [i*4 for i in range(1, 5)]
/// ```
///
/// Rust with gen! macro
/// ---
/// ```ignore
/// let x: Vec<i32> = gen!(x*4 => x in [1, 2, 3, 4]);
/// ```
#[macro_export]
#[macro_use]
macro_rules! gen {
[$e:expr => $variable:ident in $iterable:expr] => (
$iterable.iter().cloned().map(|$variable| $e).collect()
);
[$e:expr => $variable:ident in $iterable:expr, $condition:expr] => (
$iterable.iter().cloned().filter(|$variable| $condition).map(|$variable| $e).collect()
);
} | random_line_split |
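// Hedged usage sketch, not part of the original crate: a test module showing
// how the `gen!` macro defined above could be invoked. The module name, the
// `HashSet` import and the concrete values are illustrative assumptions.
#[cfg(test)]
mod gen_usage_sketch {
    use std::collections::HashSet;

    #[test]
    fn maps_and_filters() {
        // First arm: map every element of the iterable, collect into a Vec.
        let doubled: Vec<i32> = gen![i * 2 => i in [1, 2, 3]];
        assert_eq!(doubled, vec![2, 4, 6]);

        // Second arm: keep only elements satisfying the condition, then map.
        let evens: HashSet<i32> = gen![i => i in [1, 2, 3, 4, 5], i % 2 == 0];
        assert!(evens.len() == 2 && evens.contains(&2) && evens.contains(&4));
    }
}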
|
executive.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use super::test_common::*;
use state::{Backend as StateBackend, State, Substate};
use executive::*;
use evm::{VMType, Finalize};
use vm::{
self, ActionParams, CallType, Schedule, Ext,
ContractCreateResult, EnvInfo, MessageCallResult,
CreateContractAddress, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie; | use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(¶ms),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
for (address, account) in vm.post_state.unwrap().into_iter() {
let address = address.into();
let code: Vec<u8> = account.code.into();
let found_code = try_fail!(state.code(&address));
let found_balance = try_fail!(state.balance(&address));
let found_nonce = try_fail!(state.nonce(&address));
fail_unless(found_code.as_ref().map_or_else(|| code.is_empty(), |c| &**c == &code), "code is incorrect");
fail_unless(found_balance == account.balance.into(), "balance is incorrect");
fail_unless(found_nonce == account.nonce.into(), "nonce is incorrect");
for (k, v) in account.storage {
let key: U256 = k.into();
let value: U256 = v.into();
let found_storage = try_fail!(state.storage_at(&address, &From::from(key)));
fail_unless(found_storage == From::from(value), "storage is incorrect");
}
}
let calls: Option<Vec<CallCreate>> = vm.calls.map(|c| c.into_iter().map(From::from).collect());
fail_unless(Some(callcreates) == calls, "callcreates does not match");
}
};
}
for f in &failed {
println!("FAILED: {:?}", f);
}
failed
}
declare_test!{ExecutiveTests_vmArithmeticTest, "VMTests/vmArithmeticTest"}
declare_test!{ExecutiveTests_vmBitwiseLogicOperationTest, "VMTests/vmBitwiseLogicOperation"}
declare_test!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
// TODO [todr] Fails with Signal 11 when using JIT
declare_test!{ExecutiveTests_vmEnvironmentalInfoTest, "VMTests/vmEnvironmentalInfo"}
declare_test!{ExecutiveTests_vmIOandFlowOperationsTest, "VMTests/vmIOandFlowOperations"}
declare_test!{ExecutiveTests_vmLogTest, "VMTests/vmLogTest"}
declare_test!{heavy => ExecutiveTests_vmPerformance, "VMTests/vmPerformance"}
declare_test!{ExecutiveTests_vmPushDupSwapTest, "VMTests/vmPushDupSwapTest"}
declare_test!{ExecutiveTests_vmRandomTest, "VMTests/vmRandomTest"}
declare_test!{ExecutiveTests_vmSha3Test, "VMTests/vmSha3Test"}
declare_test!{ExecutiveTests_vmSystemOperationsTest, "VMTests/vmSystemOperations"}
declare_test!{ExecutiveTests_vmTests, "VMTests/vmTests"} | use rlp::RlpStream;
use hash::keccak; | random_line_split |
executive.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use super::test_common::*;
use state::{Backend as StateBackend, State, Substate};
use executive::*;
use evm::{VMType, Finalize};
use vm::{
self, ActionParams, CallType, Schedule, Ext,
ContractCreateResult, EnvInfo, MessageCallResult,
CreateContractAddress, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if!cond &&!fail | ;
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(¶ms),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
for (address, account) in vm.post_state.unwrap().into_iter() {
let address = address.into();
let code: Vec<u8> = account.code.into();
let found_code = try_fail!(state.code(&address));
let found_balance = try_fail!(state.balance(&address));
let found_nonce = try_fail!(state.nonce(&address));
fail_unless(found_code.as_ref().map_or_else(|| code.is_empty(), |c| &**c == &code), "code is incorrect");
fail_unless(found_balance == account.balance.into(), "balance is incorrect");
fail_unless(found_nonce == account.nonce.into(), "nonce is incorrect");
for (k, v) in account.storage {
let key: U256 = k.into();
let value: U256 = v.into();
let found_storage = try_fail!(state.storage_at(&address, &From::from(key)));
fail_unless(found_storage == From::from(value), "storage is incorrect");
}
}
let calls: Option<Vec<CallCreate>> = vm.calls.map(|c| c.into_iter().map(From::from).collect());
fail_unless(Some(callcreates) == calls, "callcreates does not match");
}
};
}
for f in &failed {
println!("FAILED: {:?}", f);
}
failed
}
declare_test!{ExecutiveTests_vmArithmeticTest, "VMTests/vmArithmeticTest"}
declare_test!{ExecutiveTests_vmBitwiseLogicOperationTest, "VMTests/vmBitwiseLogicOperation"}
declare_test!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
// TODO [todr] Fails with Signal 11 when using JIT
declare_test!{ExecutiveTests_vmEnvironmentalInfoTest, "VMTests/vmEnvironmentalInfo"}
declare_test!{ExecutiveTests_vmIOandFlowOperationsTest, "VMTests/vmIOandFlowOperations"}
declare_test!{ExecutiveTests_vmLogTest, "VMTests/vmLogTest"}
declare_test!{heavy => ExecutiveTests_vmPerformance, "VMTests/vmPerformance"}
declare_test!{ExecutiveTests_vmPushDupSwapTest, "VMTests/vmPushDupSwapTest"}
declare_test!{ExecutiveTests_vmRandomTest, "VMTests/vmRandomTest"}
declare_test!{ExecutiveTests_vmSha3Test, "VMTests/vmSha3Test"}
declare_test!{ExecutiveTests_vmSystemOperationsTest, "VMTests/vmSystemOperations"}
declare_test!{ExecutiveTests_vmTests, "VMTests/vmTests"}
| {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
} | conditional_block |
executive.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use super::test_common::*;
use state::{Backend as StateBackend, State, Substate};
use executive::*;
use evm::{VMType, Finalize};
use vm::{
self, ActionParams, CallType, Schedule, Ext,
ContractCreateResult, EnvInfo, MessageCallResult,
CreateContractAddress, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn | (&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule {
self.ext.schedule()
}
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(¶ms),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
for (address, account) in vm.post_state.unwrap().into_iter() {
let address = address.into();
let code: Vec<u8> = account.code.into();
let found_code = try_fail!(state.code(&address));
let found_balance = try_fail!(state.balance(&address));
let found_nonce = try_fail!(state.nonce(&address));
fail_unless(found_code.as_ref().map_or_else(|| code.is_empty(), |c| &**c == &code), "code is incorrect");
fail_unless(found_balance == account.balance.into(), "balance is incorrect");
fail_unless(found_nonce == account.nonce.into(), "nonce is incorrect");
for (k, v) in account.storage {
let key: U256 = k.into();
let value: U256 = v.into();
let found_storage = try_fail!(state.storage_at(&address, &From::from(key)));
fail_unless(found_storage == From::from(value), "storage is incorrect");
}
}
let calls: Option<Vec<CallCreate>> = vm.calls.map(|c| c.into_iter().map(From::from).collect());
fail_unless(Some(callcreates) == calls, "callcreates does not match");
}
};
}
for f in &failed {
println!("FAILED: {:?}", f);
}
failed
}
declare_test!{ExecutiveTests_vmArithmeticTest, "VMTests/vmArithmeticTest"}
declare_test!{ExecutiveTests_vmBitwiseLogicOperationTest, "VMTests/vmBitwiseLogicOperation"}
declare_test!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
// TODO [todr] Fails with Signal 11 when using JIT
declare_test!{ExecutiveTests_vmEnvironmentalInfoTest, "VMTests/vmEnvironmentalInfo"}
declare_test!{ExecutiveTests_vmIOandFlowOperationsTest, "VMTests/vmIOandFlowOperations"}
declare_test!{ExecutiveTests_vmLogTest, "VMTests/vmLogTest"}
declare_test!{heavy => ExecutiveTests_vmPerformance, "VMTests/vmPerformance"}
declare_test!{ExecutiveTests_vmPushDupSwapTest, "VMTests/vmPushDupSwapTest"}
declare_test!{ExecutiveTests_vmRandomTest, "VMTests/vmRandomTest"}
declare_test!{ExecutiveTests_vmSha3Test, "VMTests/vmSha3Test"}
declare_test!{ExecutiveTests_vmSystemOperationsTest, "VMTests/vmSystemOperations"}
declare_test!{ExecutiveTests_vmTests, "VMTests/vmTests"}
| extcodesize | identifier_name |
executive.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use super::test_common::*;
use state::{Backend as StateBackend, State, Substate};
use executive::*;
use evm::{VMType, Finalize};
use vm::{
self, ActionParams, CallType, Schedule, Ext,
ContractCreateResult, EnvInfo, MessageCallResult,
CreateContractAddress, ReturnData,
};
use externalities::*;
use tests::helpers::*;
use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
#[derive(Debug, PartialEq, Clone)]
struct CallCreate {
data: Bytes,
destination: Option<Address>,
gas_limit: U256,
value: U256
}
impl From<ethjson::vm::Call> for CallCreate {
fn from(c: ethjson::vm::Call) -> Self {
let dst: Option<ethjson::hash::Address> = c.destination.into();
CallCreate {
data: c.data.into(),
destination: dst.map(Into::into),
gas_limit: c.gas_limit.into(),
value: c.value.into()
}
}
}
/// Tiny wrapper around executive externalities.
/// Stores callcreates.
struct TestExt<'a, T: 'a, V: 'a, B: 'a>
where T: Tracer, V: VMTracer, B: StateBackend
{
ext: Externalities<'a, T, V, B>,
callcreates: Vec<CallCreate>,
nonce: U256,
sender: Address,
}
impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend,
{
fn new(
state: &'a mut State<B>,
info: &'a EnvInfo,
machine: &'a Machine,
depth: usize,
origin_info: OriginInfo,
substate: &'a mut Substate,
output: OutputPolicy<'a, 'a>,
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
ext: Externalities::new(state, info, machine, depth, origin_info, substate, output, tracer, vm_tracer, static_call),
callcreates: vec![],
sender: address,
})
}
}
impl<'a, T: 'a, V: 'a, B: 'a> Ext for TestExt<'a, T, V, B>
where T: Tracer, V: VMTracer, B: StateBackend
{
fn storage_at(&self, key: &H256) -> vm::Result<H256> {
self.ext.storage_at(key)
}
fn set_storage(&mut self, key: H256, value: H256) -> vm::Result<()> {
self.ext.set_storage(key, value)
}
fn exists(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists(address)
}
fn exists_and_not_null(&self, address: &Address) -> vm::Result<bool> {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> vm::Result<U256> {
self.ext.balance(address)
}
fn origin_balance(&self) -> vm::Result<U256> {
self.ext.origin_balance()
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.ext.blockhash(number)
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], address: CreateContractAddress) -> ContractCreateResult {
self.callcreates.push(CallCreate {
data: code.to_vec(),
destination: None,
gas_limit: *gas,
value: *value
});
let contract_address = contract_address(address, &self.sender, &self.nonce, &code).0;
ContractCreateResult::Created(contract_address, *gas)
}
fn call(&mut self,
gas: &U256,
_sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
_code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.callcreates.push(CallCreate {
data: data.to_vec(),
destination: Some(receive_address.clone()),
gas_limit: *gas,
value: value.unwrap()
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> vm::Result<Arc<Bytes>> {
self.ext.extcode(address)
}
fn extcodesize(&self, address: &Address) -> vm::Result<usize> {
self.ext.extcodesize(address)
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> vm::Result<()> {
self.ext.log(topics, data)
}
fn ret(self, gas: &U256, data: &ReturnData, apply_state: bool) -> Result<U256, vm::Error> {
self.ext.ret(gas, data, apply_state)
}
fn suicide(&mut self, refund_address: &Address) -> vm::Result<()> {
self.ext.suicide(refund_address)
}
fn schedule(&self) -> &Schedule |
fn env_info(&self) -> &EnvInfo {
self.ext.env_info()
}
fn depth(&self) -> usize {
0
}
fn is_static(&self) -> bool {
false
}
fn inc_sstore_clears(&mut self) {
self.ext.inc_sstore_clears()
}
}
fn do_json_test(json_data: &[u8]) -> Vec<String> {
let vms = VMType::all();
vms
.iter()
.flat_map(|vm| do_json_test_for(vm, json_data))
.collect()
}
fn do_json_test_for(vm_type: &VMType, json_data: &[u8]) -> Vec<String> {
let tests = ethjson::vm::Test::load(json_data).unwrap();
let mut failed = Vec::new();
for (name, vm) in tests.into_iter() {
println!("name: {:?}", name);
let mut fail = false;
let mut fail_unless = |cond: bool, s: &str | if !cond && !fail {
failed.push(format!("[{}] {}: {}", vm_type, name, s));
fail = true
};
macro_rules! try_fail {
($e: expr) => {
match $e {
Ok(x) => x,
Err(e) => {
let msg = format!("Internal error: {}", e);
fail_unless(false, &msg);
continue
}
}
}
}
let out_of_gas = vm.out_of_gas();
let mut state = get_temp_state();
state.populate_from(From::from(vm.pre_state.clone()));
let info = From::from(vm.env);
let machine = {
let mut machine = ::ethereum::new_frontier_test_machine();
machine.set_schedule_creation_rules(Box::new(move |s, _| s.max_depth = 1));
machine
};
let params = ActionParams::from(vm.transaction);
let mut substate = Substate::new();
let mut tracer = NoopTracer;
let mut vm_tracer = NoopVMTracer;
let mut output = vec![];
let vm_factory = state.vm_factory();
// execute
let (res, callcreates) = {
let mut ex = try_fail!(TestExt::new(
&mut state,
&info,
&machine,
0,
OriginInfo::from(¶ms),
&mut substate,
OutputPolicy::Return(BytesRef::Flexible(&mut output), None),
params.address.clone(),
&mut tracer,
&mut vm_tracer,
));
let mut evm = vm_factory.create(params.gas);
let res = evm.exec(params, &mut ex);
// a return in finalize will not alter callcreates
let callcreates = ex.callcreates.clone();
(res.finalize(ex), callcreates)
};
let log_hash = {
let mut rlp = RlpStream::new_list(substate.logs.len());
for l in &substate.logs {
rlp.append(l);
}
keccak(&rlp.drain())
};
match res {
Err(_) => fail_unless(out_of_gas, "didn't expect to run out of gas."),
Ok(res) => {
fail_unless(!out_of_gas, "expected to run out of gas.");
fail_unless(Some(res.gas_left) == vm.gas_left.map(Into::into), "gas_left is incorrect");
let vm_output: Option<Vec<u8>> = vm.output.map(Into::into);
fail_unless(Some(output) == vm_output, "output is incorrect");
fail_unless(Some(log_hash) == vm.logs.map(|h| h.0), "logs are incorrect");
for (address, account) in vm.post_state.unwrap().into_iter() {
let address = address.into();
let code: Vec<u8> = account.code.into();
let found_code = try_fail!(state.code(&address));
let found_balance = try_fail!(state.balance(&address));
let found_nonce = try_fail!(state.nonce(&address));
fail_unless(found_code.as_ref().map_or_else(|| code.is_empty(), |c| &**c == &code), "code is incorrect");
fail_unless(found_balance == account.balance.into(), "balance is incorrect");
fail_unless(found_nonce == account.nonce.into(), "nonce is incorrect");
for (k, v) in account.storage {
let key: U256 = k.into();
let value: U256 = v.into();
let found_storage = try_fail!(state.storage_at(&address, &From::from(key)));
fail_unless(found_storage == From::from(value), "storage is incorrect");
}
}
let calls: Option<Vec<CallCreate>> = vm.calls.map(|c| c.into_iter().map(From::from).collect());
fail_unless(Some(callcreates) == calls, "callcreates does not match");
}
};
}
for f in &failed {
println!("FAILED: {:?}", f);
}
failed
}
declare_test!{ExecutiveTests_vmArithmeticTest, "VMTests/vmArithmeticTest"}
declare_test!{ExecutiveTests_vmBitwiseLogicOperationTest, "VMTests/vmBitwiseLogicOperation"}
declare_test!{ExecutiveTests_vmBlockInfoTest, "VMTests/vmBlockInfoTest"}
// TODO [todr] Fails with Signal 11 when using JIT
declare_test!{ExecutiveTests_vmEnvironmentalInfoTest, "VMTests/vmEnvironmentalInfo"}
declare_test!{ExecutiveTests_vmIOandFlowOperationsTest, "VMTests/vmIOandFlowOperations"}
declare_test!{ExecutiveTests_vmLogTest, "VMTests/vmLogTest"}
declare_test!{heavy => ExecutiveTests_vmPerformance, "VMTests/vmPerformance"}
declare_test!{ExecutiveTests_vmPushDupSwapTest, "VMTests/vmPushDupSwapTest"}
declare_test!{ExecutiveTests_vmRandomTest, "VMTests/vmRandomTest"}
declare_test!{ExecutiveTests_vmSha3Test, "VMTests/vmSha3Test"}
declare_test!{ExecutiveTests_vmSystemOperationsTest, "VMTests/vmSystemOperations"}
declare_test!{ExecutiveTests_vmTests, "VMTests/vmTests"}
| {
self.ext.schedule()
} | identifier_body |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct | (i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point`
let point: Point = Point { x: 10.3, y: 0.4 };
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field
// from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
}
| Pair | identifier_name |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct Pair(i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point` |
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field
// from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
} | let point: Point = Point { x: 10.3, y: 0.4 }; | random_line_split |
300-custom-types.rs | #[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct
struct Unit;
// A tuple struct
struct Pair(i32, f32);
// A struct with two fields
struct Point {
x: f32,
y: f32,
}
// Structs can be reused as fields of another struct
#[allow(dead_code)]
struct Rectangle {
// A rectangle can be specified by where the top left and bottom right
// corners are in space.
top_left: Point,
bottom_right: Point,
}
fn main() | // from `point`
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// Destructure the point using a `let` binding
let Point {
x: left_edge,
y: top_edge,
} = point;
let _rectangle = Rectangle {
// struct instantiation is an expression too
top_left: Point {
x: left_edge,
y: top_edge,
},
bottom_right,
};
// Instantiate a unit struct
let _unit = Unit;
// Instantiate a tuple struct
let pair = Pair(1, 0.1);
// Access the fields of a tuple struct
println!("pair contains {:?} and {:?}", pair.0, pair.1);
// Destructure a tuple struct
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
}
| {
// Create struct with field init shorthand
let name = String::from("Peter");
let age = 27;
let peter = Person { name, age };
// Print debug struct
println!("{:?}", peter);
// Instantiate a `Point`
let point: Point = Point { x: 10.3, y: 0.4 };
// Access the fields of the point
println!("point coordinates: ({}, {})", point.x, point.y);
// Make a new point by using struct update syntax to use the fields of our
// other one
let bottom_right = Point { x: 5.2, ..point };
// `bottom_right.y` will be the same as `point.y` because we used that field | identifier_body |
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
#[test]
fn as_unsigned_test1() |
}
| {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned();
assert_eq!(as_unsigned[0], 18446744073709551615);
} | identifier_body |
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
#[test]
fn | () {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned();
assert_eq!(as_unsigned[0], 18446744073709551615);
}
}
| as_unsigned_test1 | identifier_name |
as_unsigned.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
// }
// macro_rules! impl_int_slice {
// ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
|
assert_eq!(as_unsigned[0], 18446744073709551615);
}
} | #[test]
fn as_unsigned_test1() {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned(); | random_line_split |
strconv.rs | ..3] = ['N' as u8, 'a' as u8, 'N' as u8];
/**
* Converts an integral number to its string representation as a byte vector.
* This is meant to be a common base implementation for all integral string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `f` - a callback which will be invoked for each ascii character
* which composes the string representation of this integer
*
* # Return value
* Nothing is returned; the representation is emitted one ascii byte at a
* time through the `f` callback: the sign (if any) first, then the digits
* from most significant to least significant.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn int_to_str_bytes_common<T:NumCast+Zero+Eq+Ord+Integer+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, sign: SignFormat, f: &fn(u8)) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let neg = num < _0;
let radix_gen: T = cast(radix);
let mut deccum = num;
// This is just for integral types, the largest of which is a u64. The
// smallest base that we can have is 2, so the most number of digits we're
// ever going to have is 64
let mut buf = [0u8,..64];
let mut cur = 0;
// Loop at least once to make sure at least a `0` gets emitted.
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
// representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (See twos complement). But we assume that for the
// numbers [-35.. 0] we always have [0.. 35].
let current_digit_signed = deccum % radix_gen;
let current_digit = if current_digit_signed < _0 {
-current_digit_signed
} else {
current_digit_signed
};
buf[cur] = match current_digit.to_u8() {
i @ 0..9 => '0' as u8 + i,
i => 'a' as u8 + (i - 10),
};
cur += 1;
deccum = deccum / radix_gen;
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => { f('-' as u8); }
SignAll => { f('+' as u8); }
_ => ()
}
// We built the number in reverse order, so un-reverse it here
while cur > 0 {
cur -= 1;
f(buf[cur]);
}
}
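// Hedged usage sketch, not part of the original module: drives the
// callback-based converter above and collects each emitted ascii byte into an
// owned vector, using the pre-1.0 idioms of this file (`~[u8]`, `uint`,
// borrowed closures). The value, radix and sign mode are illustrative.
fn int_to_str_bytes_sketch() {
    let mut out: ~[u8] = ~[];
    // -255 rendered in base 16; '-' is emitted only because the input is negative.
    int_to_str_bytes_common(-255i, 16u, SignNeg, |byte| out.push(byte));
    // Expect the bytes of "-ff": the sign first, then the most significant digit.
    assert_eq!(out, ~['-' as u8, 'f' as u8, 'f' as u8]);
}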
/**
* Converts a number to its string representation as a byte vector.
* This is meant to be a common base implementation for all numeric string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `negative_zero` - Whether to treat the special value `-0` as
* `-0` or as `+0`.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `digits` - The amount of digits to use for emitting the
* fractional part, if any. Options are:
* - `DigAll`: All calculatable digits. Beware of bignums or
* fractions!
* - `DigMax(uint)`: Maximum N digits, truncating any trailing zeros.
* - `DigExact(uint)`: Exactly N digits.
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn float_to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~[u8], bool) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let _1: T = One::one();
match num.classify() {
FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
FPInfinite if num > _0 => {
return match sign {
SignAll => ("+inf".as_bytes().to_owned(), true),
_ => ("inf".as_bytes().to_owned(), true)
};
}
FPInfinite if num < _0 => {
return match sign {
SignNone => ("inf".as_bytes().to_owned(), true),
_ => ("-inf".as_bytes().to_owned(), true),
};
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
let mut buf: ~[u8] = ~[];
let radix_gen: T = cast(radix as int);
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
// representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (See twos complement). But we assume that for the
// numbers [-35.. 0] we always have [0.. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
buf.push(char::from_digit(current_digit.to_int() as uint, radix)
.unwrap() as u8);
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigAll => (false, 0u, false),
DigMax(count) => (true, count+1, false),
DigExact(count) => (true, count+1, true)
};
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => {
buf.push('-' as u8);
}
SignAll => {
buf.push('+' as u8);
}
_ => ()
}
buf.reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = buf.len();
// Now emit the fractional part, if any
deccum = num.fract();
if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf.push('.' as u8);
let mut dig = 0u;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
while (!limit_digits && deccum != _0)
|| (limit_digits && dig < digit_count && (
exact
|| (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
    if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false) | * `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
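// Illustrative usage (added; a sketch, not part of the original source):
//
//     let (s, special) = float_to_str_common(-0.5f32, 10u, true, SignNeg, DigMax(4));
//     assert_eq!(s, ~"-0.5");
//     assert!(!special);   // `special` is only true for "inf", "-inf" and "NaN"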
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
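// For reference: with 'a' counting as 10, these evaluate to
//   DIGIT_E_RADIX == 15  ('e' becomes a digit once radix > 14),
//   DIGIT_I_RADIX == 19  ('i' becomes a digit once radix > 18),
//   DIGIT_P_RADIX == 26  ('p' becomes a digit once radix > 25),
// which is where the radix limits in the Failure section below come from.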
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
 * - `empty_zero` - Whether to accept an empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
            => fail!("from_str_bytes_common: radix %? too low, \
                     must lie in the range [2, 36]", radix),
        _ if (radix as int) > 36
            => fail!("from_str_bytes_common: radix %? too high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
        '-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
                if last_accum != _0 {
                    if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
                    if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
                    // Detect overflow by reversing the shift-and-add process
                    if accum_positive &&
                        (last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
                        return NumStrConv::inf();
                    }
                    if !accum_positive &&
                        (last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => {
i += 1u; // skip the '.'
| }
/**
* Converts a number to its string representation. This is a wrapper for | random_line_split |
strconv.rs | // calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
        while (!limit_digits && deccum != _0)
            || (limit_digits && dig < digit_count && (
                exact
                || (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
    if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false)
}
/**
* Converts a number to its string representation. This is a wrapper for
* `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
 * - `empty_zero` - Whether to accept an empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
            => fail!("from_str_bytes_common: radix %? too low, \
                     must lie in the range [2, 36]", radix),
        _ if (radix as int) > 36
            => fail!("from_str_bytes_common: radix %? too high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
        '-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
                if last_accum != _0 {
                    if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
                    if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
                    // Detect overflow by reversing the shift-and-add process
                    if accum_positive &&
                        (last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
                        return NumStrConv::inf();
                    }
                    if !accum_positive &&
                        (last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => {
i += 1u; // skip the '.'
break; // start of fractional part
}
_ => return None // invalid number
}
}
i += 1u;
}
// Parse fractional part of number
// Skip if already reached start of exponent
    if !exp_found {
let mut power = _1.clone();
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// Decrease power one order of magnitude
power = power / radix_gen;
let digit_t: T = cast(digit);
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + digit_t * power;
} else {
accum = accum - digit_t * power;
}
// Detect overflow by comparing to last value
if accum_positive && accum < last_accum { return NumStrConv::inf(); }
                if !accum_positive && accum > last_accum { return NumStrConv::neg_inf(); }
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
_ => return None // invalid number
}
}
i += 1u;
}
}
// Special case: buf not empty, but does not contain any digit in front
// of the exponent sign -> number is empty string
if i == start {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
let mut multiplier = _1.clone();
if exp_found {
let c = buf[i] as char;
let base = match (c, exponent) {
// c is never _ so don't need to handle specially
('e', ExpDec) | ('E', ExpDec) => 10u,
('p', ExpBin) | ('P', ExpBin) => 2u,
_ => return None // char doesn't fit given exponent format
};
// parse remaining bytes as decimal integer,
// skipping the exponent char
let exp: Option<int> = from_str_bytes_common(
buf.slice(i+1, len), 10, true, false, false, ExpNone, false,
ignore_underscores);
match exp {
Some(exp_pow) => {
multiplier = if exp_pow < 0 {
_1 / pow_with_uint::<T>(base, (-exp_pow.to_int()) as uint)
} else {
pow_with_uint::<T>(base, exp_pow.to_int() as uint)
}
}
None => return None // invalid exponent -> invalid number
}
}
Some(accum * multiplier)
}
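// Worked example (added for clarity): parsing "8.2E-2" with radix 10 and
// ExpDec proceeds as
//   integer part    "8"   -> accum == 8
//   fractional part "2"   -> accum == 8.2
//   exponent        "E-2" -> multiplier == 1 / 10^2
// so the result is Some(8.2 * 0.01), i.e. 0.082 up to floating-point rounding.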
/**
* Parses a string as a number. This is a wrapper for
* `from_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn from_str_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+Mul<T,T>+
Sub<T,T>+Neg<T>+Add<T,T>+NumStrConv+Clone>(
buf: &str, radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
from_str_bytes_common(buf.as_bytes(), radix, negative,
fractional, special, exponent, empty_zero,
ignore_underscores)
}
#[cfg(test)]
mod test {
use super::*;
use option::*;
#[test]
fn from_str_ignore_underscores() {
let s : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, true);
assert_eq!(s, Some(1u8));
let n : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
let f : Option<f32> = from_str_common("_1_._5_e_1_", 10, false, true, false,
ExpDec, false, true);
assert_eq!(f, Some(1.5e1f32));
}
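    #[test]
    fn from_str_binary_exponent_example() {
        // Added as an illustrative case (not part of the original suite):
        // "1p3" in base 2 with a binary exponent is 1 * 2^3.
        let f : Option<f32> = from_str_common("1p3", 2, false, false, false,
                                              ExpBin, false, false);
        assert_eq!(f, Some(8.0f32));
    }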
#[test]
fn from_str_issue5770() {
// try to parse 0b1_1111_1111 = 511 as a u8. Caused problems
// since 255*2+1 == 255 (mod 256) so the overflow wasn't
// detected.
let n : Option<u8> = from_str_common("111111111", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
}
#[test]
fn from_str_issue7588() {
let u : Option<u8> = from_str_common("1000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(u, None);
let s : Option<i16> = from_str_common("80000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(s, None);
let f : Option<f32> = from_str_common(
"10000000000000000000000000000000000000000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(f, NumStrConv::inf())
let fe : Option<f32> = from_str_common("1e40", 10, false, false, false,
ExpDec, false, false);
assert_eq!(fe, NumStrConv::inf())
}
}
#[cfg(test)]
mod bench {
use extra::test::BenchHarness;
use rand::{XorShiftRng, Rng};
use float;
use to_str::ToStr;
#[bench]
fn uint_to_str_rand(bh: &mut BenchHarness) | {
let mut rng = XorShiftRng::new();
do bh.iter {
rng.gen::<uint>().to_str();
}
} | identifier_body |
|
strconv.rs | for all integral string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `f` - a callback which will be invoked for each ascii character
* which composes the string representation of this integer
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn int_to_str_bytes_common<T:NumCast+Zero+Eq+Ord+Integer+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, sign: SignFormat, f: &fn(u8)) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let neg = num < _0;
let radix_gen: T = cast(radix);
let mut deccum = num;
// This is just for integral types, the largest of which is a u64. The
// smallest base that we can have is 2, so the most number of digits we're
// ever going to have is 64
let mut buf = [0u8,..64];
let mut cur = 0;
// Loop at least once to make sure at least a `0` gets emitted.
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
        // representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (See twos complement). But we assume that for the
// numbers [-35.. 0] we always have [0.. 35].
let current_digit_signed = deccum % radix_gen;
let current_digit = if current_digit_signed < _0 {
-current_digit_signed
} else {
current_digit_signed
};
buf[cur] = match current_digit.to_u8() {
i @ 0..9 => '0' as u8 + i,
i => 'a' as u8 + (i - 10),
};
cur += 1;
deccum = deccum / radix_gen;
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => { f('-' as u8); }
SignAll => { f('+' as u8); }
_ => ()
}
// We built the number in reverse order, so un-reverse it here
while cur > 0 {
cur -= 1;
f(buf[cur]);
}
}
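// Illustrative usage (added; a sketch in the same pre-1.0 style as the
// surrounding code):
//
//     let mut out: ~[u8] = ~[];
//     int_to_str_bytes_common(-255i, 16, SignNeg, |b| out.push(b));
//     // `out` now holds the bytes of "-ff"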
/**
* Converts a number to its string representation as a byte vector.
* This is meant to be a common base implementation for all numeric string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `negative_zero` - Whether to treat the special value `-0` as
* `-0` or as `+0`.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `digits` - The amount of digits to use for emitting the
* fractional part, if any. Options are:
* - `DigAll`: All calculatable digits. Beware of bignums or
* fractions!
* - `DigMax(uint)`: Maximum N digits, truncating any trailing zeros.
* - `DigExact(uint)`: Exactly N digits.
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn float_to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~[u8], bool) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let _1: T = One::one();
match num.classify() {
FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
FPInfinite if num > _0 => {
return match sign {
SignAll => ("+inf".as_bytes().to_owned(), true),
_ => ("inf".as_bytes().to_owned(), true)
};
}
FPInfinite if num < _0 => {
return match sign {
SignNone => ("inf".as_bytes().to_owned(), true),
_ => ("-inf".as_bytes().to_owned(), true),
};
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
let mut buf: ~[u8] = ~[];
let radix_gen: T = cast(radix as int);
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
        // representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (See twos complement). But we assume that for the
// numbers [-35.. 0] we always have [0.. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
buf.push(char::from_digit(current_digit.to_int() as uint, radix)
.unwrap() as u8);
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigAll => (false, 0u, false),
DigMax(count) => (true, count+1, false),
DigExact(count) => (true, count+1, true)
};
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => {
buf.push('-' as u8);
}
SignAll => {
buf.push('+' as u8);
}
_ => ()
}
buf.reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = buf.len();
// Now emit the fractional part, if any
deccum = num.fract();
    if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf.push('.' as u8);
let mut dig = 0u;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
        while (!limit_digits && deccum != _0)
            || (limit_digits && dig < digit_count && (
                exact
                || (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
    if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false)
}
/**
* Converts a number to its string representation. This is a wrapper for
* `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
 * - `empty_zero` - Whether to accept an empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
            => fail!("from_str_bytes_common: radix %? too low, \
                     must lie in the range [2, 36]", radix),
        _ if (radix as int) > 36
            => fail!("from_str_bytes_common: radix %? too high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
        '-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
                if last_accum != _0 {
                    if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
                    if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
                    // Detect overflow by reversing the shift-and-add process
                    if accum_positive &&
                        (last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
                        return NumStrConv::inf();
                    }
                    if !accum_positive &&
                        (last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => | {
i += 1u; // skip the '.'
break; // start of fractional part
} | conditional_block |
|
strconv.rs | - `DigExact(uint)`: Exactly N digits.
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn float_to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~[u8], bool) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let _1: T = One::one();
match num.classify() {
FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
FPInfinite if num > _0 => {
return match sign {
SignAll => ("+inf".as_bytes().to_owned(), true),
_ => ("inf".as_bytes().to_owned(), true)
};
}
FPInfinite if num < _0 => {
return match sign {
SignNone => ("inf".as_bytes().to_owned(), true),
_ => ("-inf".as_bytes().to_owned(), true),
};
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
let mut buf: ~[u8] = ~[];
let radix_gen: T = cast(radix as int);
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
        // representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (See twos complement). But we assume that for the
// numbers [-35.. 0] we always have [0.. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
buf.push(char::from_digit(current_digit.to_int() as uint, radix)
.unwrap() as u8);
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigAll => (false, 0u, false),
DigMax(count) => (true, count+1, false),
DigExact(count) => (true, count+1, true)
};
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => {
buf.push('-' as u8);
}
SignAll => {
buf.push('+' as u8);
}
_ => ()
}
buf.reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = buf.len();
// Now emit the fractional part, if any
deccum = num.fract();
    if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf.push('.' as u8);
let mut dig = 0u;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
        while (!limit_digits && deccum != _0)
            || (limit_digits && dig < digit_count && (
                exact
                || (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
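    // Worked example (added for clarity): formatting 0.375 in base 10 with
    // DigMax(2) first produces "0.375" (one digit more than requested), pops
    // the extra '5', and since 5 >= radix / 2 the carry bumps the last kept
    // digit, leaving "0.38" for the caller.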
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
    if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false)
}
/**
* Converts a number to its string representation. This is a wrapper for
* `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
 * - `empty_zero` - Whether to accept an empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
            => fail!("from_str_bytes_common: radix %? too low, \
                     must lie in the range [2, 36]", radix),
        _ if (radix as int) > 36
            => fail!("from_str_bytes_common: radix %? too high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
        '-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
                if last_accum != _0 {
                    if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
                    if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
                    // Detect overflow by reversing the shift-and-add process
                    if accum_positive &&
                        (last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
                        return NumStrConv::inf();
                    }
                    if !accum_positive &&
                        (last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
'.' if fractional => {
i += 1u; // skip the '.'
break; // start of fractional part
}
_ => return None // invalid number
}
}
i += 1u;
}
// Parse fractional part of number
// Skip if already reached start of exponent
    if !exp_found {
let mut power = _1.clone();
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// Decrease power one order of magnitude
power = power / radix_gen;
let digit_t: T = cast(digit);
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + digit_t * power;
} else {
accum = accum - digit_t * power;
}
// Detect overflow by comparing to last value
if accum_positive && accum < last_accum { return NumStrConv::inf(); }
                if !accum_positive && accum > last_accum { return NumStrConv::neg_inf(); }
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
_ => return None // invalid number
}
}
i += 1u;
}
}
// Special case: buf not empty, but does not contain any digit in front
// of the exponent sign -> number is empty string
if i == start {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
let mut multiplier = _1.clone();
if exp_found {
let c = buf[i] as char;
let base = match (c, exponent) {
// c is never _ so don't need to handle specially
('e', ExpDec) | ('E', ExpDec) => 10u,
('p', ExpBin) | ('P', ExpBin) => 2u,
_ => return None // char doesn't fit given exponent format
};
// parse remaining bytes as decimal integer,
// skipping the exponent char
let exp: Option<int> = from_str_bytes_common(
buf.slice(i+1, len), 10, true, false, false, ExpNone, false,
ignore_underscores);
match exp {
Some(exp_pow) => {
multiplier = if exp_pow < 0 {
_1 / pow_with_uint::<T>(base, (-exp_pow.to_int()) as uint)
} else {
pow_with_uint::<T>(base, exp_pow.to_int() as uint)
}
}
None => return None // invalid exponent -> invalid number
}
}
Some(accum * multiplier)
}
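// Worked example (added for clarity): "FFp2" with radix 16 and ExpBin parses
// the digits as 0xFF == 255, stops at 'p', reads the (always base-10)
// exponent "2", and so returns Some(255 * 2^2), i.e. 1020 for a float target.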
/**
* Parses a string as a number. This is a wrapper for
* `from_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn | from_str_common | identifier_name |
|
mod.rs | /*
use std::fmt;
use std::io::{self, Write};
use std::marker::PhantomData;
use std::sync::mpsc;
use url::Url;
use tick;
use time::now_utc;
use header::{self, Headers};
use http::{self, conn};
use method::Method;
use net::{Fresh, Streaming};
use status::StatusCode;
use version::HttpVersion;
*/
pub use self::decode::Decoder;
pub use self::encode::Encoder;
pub use self::parse::parse;
mod decode;
mod encode;
mod parse;
/*
fn should_have_response_body(method: &Method, status: u16) -> bool {
trace!("should_have_response_body({:?}, {})", method, status);
match (method, status) {
(&Method::Head, _) |
(_, 100...199) |
(_, 204) |
(_, 304) |
(&Method::Connect, 200...299) => false,
_ => true
}
}
*/
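// Illustrative reading of `should_have_response_body` above (added note):
// HEAD responses, 1xx/204/304 statuses, and 2xx replies to CONNECT carry no
// body; e.g. (&Method::Head, 200) => false while (&Method::Get, 200) => true.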
/*
const MAX_INVALID_RESPONSE_BYTES: usize = 1024 * 128;
impl HttpMessage for Http11Message {
fn get_incoming(&mut self) -> ::Result<ResponseHead> {
unimplemented!();
/*
try!(self.flush_outgoing());
let stream = match self.stream.take() {
Some(stream) => stream,
None => {
// The message was already in the reading state...
// TODO Decide what happens in case we try to get a new incoming at that point
return Err(From::from(
io::Error::new(io::ErrorKind::Other,
"Read already in progress")));
}
};
let expected_no_content = stream.previous_response_expected_no_content();
trace!("previous_response_expected_no_content = {}", expected_no_content);
let mut stream = BufReader::new(stream);
let mut invalid_bytes_read = 0;
let head;
loop {
head = match parse_response(&mut stream) {
Ok(head) => head,
Err(::Error::Version)
if expected_no_content && invalid_bytes_read < MAX_INVALID_RESPONSE_BYTES => {
trace!("expected_no_content, found content");
invalid_bytes_read += 1;
stream.consume(1);
continue;
}
Err(e) => {
self.stream = Some(stream.into_inner());
return Err(e);
}
};
break;
}
let raw_status = head.subject;
let headers = head.headers;
let method = self.method.take().unwrap_or(Method::Get);
        let is_empty = !should_have_response_body(&method, raw_status.0);
stream.get_mut().set_previous_response_expected_no_content(is_empty);
// According to https://tools.ietf.org/html/rfc7230#section-3.3.3
        // 1. HEAD responses, and Status 1xx, 204, and 304 cannot have a body.
// 2. Status 2xx to a CONNECT cannot have a body.
// 3. Transfer-Encoding: chunked has a chunked body.
// 4. If multiple differing Content-Length headers or invalid, close connection.
// 5. Content-Length header has a sized body.
// 6. Not Client.
// 7. Read till EOF.
self.reader = Some(if is_empty {
SizedReader(stream, 0)
} else {
if let Some(&TransferEncoding(ref codings)) = headers.get() {
if codings.last() == Some(&Chunked) {
ChunkedReader(stream, None)
} else {
trace!("not chuncked. read till eof");
EofReader(stream)
}
} else if let Some(&ContentLength(len)) = headers.get() {
SizedReader(stream, len)
} else if headers.has::<ContentLength>() {
trace!("illegal Content-Length: {:?}", headers.get_raw("Content-Length"));
return Err(Error::Header);
} else {
trace!("neither Transfer-Encoding nor Content-Length");
EofReader(stream)
}
});
trace!("Http11Message.reader = {:?}", self.reader);
Ok(ResponseHead { | }
}
*/ | headers: headers,
raw_status: raw_status,
version: head.version,
})
*/ | random_line_split |
decode.rs | use std::{iter, fs, path};
use image::ImageFormat;
use criterion::{Criterion, criterion_group, criterion_main};
#[derive(Clone, Copy)]
struct BenchDef {
dir: &'static [&'static str],
files: &'static [&'static str],
format: ImageFormat,
}
fn load_all(c: &mut Criterion) | files: &[
"alpha_gif_a.gif",
"sample_1.gif",
],
format: ImageFormat::Gif,
},
BenchDef {
dir: &["hdr", "images"],
files: &[
"image1.hdr",
"rgbr4x4.hdr",
],
format: ImageFormat::Hdr,
},
BenchDef {
dir: &["ico", "images"],
files: &[
"bmp-24bpp-mask.ico",
"bmp-32bpp-alpha.ico",
"png-32bpp-alpha.ico",
"smile.ico",
],
format: ImageFormat::Ico,
},
BenchDef {
dir: &["jpg", "progressive"],
files: &[
"3.jpg",
"cat.jpg",
"test.jpg",
],
format: ImageFormat::Jpeg,
},
// TODO: pnm
// TODO: png
BenchDef {
dir: &["tga", "testsuite"],
files: &[
"cbw8.tga",
"ctc24.tga",
"ubw8.tga",
"utc24.tga",
],
format: ImageFormat::Tga,
},
BenchDef {
dir: &["tiff", "testsuite"],
files: &[
"hpredict.tiff",
"hpredict_packbits.tiff",
"mandrill.tiff",
"rgb-3c-16b.tiff",
],
format: ImageFormat::Tiff,
},
BenchDef {
dir: &["webp", "images"],
files: &[
"simple-gray.webp",
"simple-rgb.webp",
"vp8x-gray.webp",
"vp8x-rgb.webp",
],
format: ImageFormat::WebP,
},
];
for bench in BENCH_DEFS {
bench_load(c, bench);
}
}
criterion_group!(benches, load_all);
criterion_main!(benches);
fn bench_load(c: &mut Criterion, def: &BenchDef) {
let group_name = format!("load-{:?}", def.format);
let mut group = c.benchmark_group(&group_name);
let paths = IMAGE_DIR.iter().chain(def.dir);
for file_name in def.files {
let path: path::PathBuf = paths.clone().chain(iter::once(file_name)).collect();
let buf = fs::read(path).unwrap();
group.bench_function(file_name.to_owned(), |b| b.iter(|| {
image::load_from_memory_with_format(&buf, def.format).unwrap();
}));
}
}
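// Note (added): each file is read once up front, so the benchmark times only
// the in-memory decode in `load_from_memory_with_format`. Assuming this file
// is wired up as a criterion bench target named `decode`, it would be run
// with `cargo bench --bench decode`.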
const IMAGE_DIR: [&'static str; 3] = [".", "tests", "images"];
| {
const BENCH_DEFS: &'static [BenchDef] = &[
BenchDef {
dir: &["bmp", "images"],
files: &[
"Core_1_Bit.bmp",
"Core_4_Bit.bmp",
"Core_8_Bit.bmp",
"rgb16.bmp",
"rgb24.bmp",
"rgb32.bmp",
"pal4rle.bmp",
"pal8rle.bmp",
"rgb16-565.bmp",
"rgb32bf.bmp",
],
format: ImageFormat::Bmp,
},
BenchDef {
dir: &["gif", "simple"], | identifier_body |
decode.rs | use std::{iter, fs, path};
use image::ImageFormat;
use criterion::{Criterion, criterion_group, criterion_main};
#[derive(Clone, Copy)]
struct BenchDef {
dir: &'static [&'static str],
files: &'static [&'static str],
format: ImageFormat,
}
fn | (c: &mut Criterion) {
const BENCH_DEFS: &'static [BenchDef] = &[
BenchDef {
dir: &["bmp", "images"],
files: &[
"Core_1_Bit.bmp",
"Core_4_Bit.bmp",
"Core_8_Bit.bmp",
"rgb16.bmp",
"rgb24.bmp",
"rgb32.bmp",
"pal4rle.bmp",
"pal8rle.bmp",
"rgb16-565.bmp",
"rgb32bf.bmp",
],
format: ImageFormat::Bmp,
},
BenchDef {
dir: &["gif", "simple"],
files: &[
"alpha_gif_a.gif",
"sample_1.gif",
],
format: ImageFormat::Gif,
},
BenchDef {
dir: &["hdr", "images"],
files: &[
"image1.hdr",
"rgbr4x4.hdr",
],
format: ImageFormat::Hdr,
},
BenchDef {
dir: &["ico", "images"],
files: &[
"bmp-24bpp-mask.ico",
"bmp-32bpp-alpha.ico",
"png-32bpp-alpha.ico",
"smile.ico",
],
format: ImageFormat::Ico,
},
BenchDef {
dir: &["jpg", "progressive"],
files: &[
"3.jpg",
"cat.jpg",
"test.jpg",
],
format: ImageFormat::Jpeg,
},
// TODO: pnm
// TODO: png
BenchDef {
dir: &["tga", "testsuite"],
files: &[
"cbw8.tga",
"ctc24.tga",
"ubw8.tga",
"utc24.tga",
],
format: ImageFormat::Tga,
},
BenchDef {
dir: &["tiff", "testsuite"],
files: &[
"hpredict.tiff",
"hpredict_packbits.tiff",
"mandrill.tiff",
"rgb-3c-16b.tiff",
],
format: ImageFormat::Tiff,
},
BenchDef {
dir: &["webp", "images"],
files: &[
"simple-gray.webp",
"simple-rgb.webp",
"vp8x-gray.webp",
"vp8x-rgb.webp",
],
format: ImageFormat::WebP,
},
];
for bench in BENCH_DEFS {
bench_load(c, bench);
}
}
criterion_group!(benches, load_all);
criterion_main!(benches);
fn bench_load(c: &mut Criterion, def: &BenchDef) {
let group_name = format!("load-{:?}", def.format);
let mut group = c.benchmark_group(&group_name);
let paths = IMAGE_DIR.iter().chain(def.dir);
for file_name in def.files {
let path: path::PathBuf = paths.clone().chain(iter::once(file_name)).collect();
let buf = fs::read(path).unwrap();
group.bench_function(file_name.to_owned(), |b| b.iter(|| {
image::load_from_memory_with_format(&buf, def.format).unwrap();
}));
}
}
const IMAGE_DIR: [&'static str; 3] = [".", "tests", "images"];
| load_all | identifier_name |
decode.rs | use std::{iter, fs, path};
use image::ImageFormat;
use criterion::{Criterion, criterion_group, criterion_main};
#[derive(Clone, Copy)]
struct BenchDef {
dir: &'static [&'static str],
files: &'static [&'static str],
format: ImageFormat,
}
fn load_all(c: &mut Criterion) {
const BENCH_DEFS: &'static [BenchDef] = &[
BenchDef {
dir: &["bmp", "images"],
files: &[
"Core_1_Bit.bmp",
"Core_4_Bit.bmp",
"Core_8_Bit.bmp",
"rgb16.bmp",
"rgb24.bmp",
"rgb32.bmp",
"pal4rle.bmp",
"pal8rle.bmp",
"rgb16-565.bmp",
"rgb32bf.bmp",
],
format: ImageFormat::Bmp,
},
BenchDef {
dir: &["gif", "simple"],
files: &[
"alpha_gif_a.gif",
"sample_1.gif",
],
format: ImageFormat::Gif,
},
BenchDef {
dir: &["hdr", "images"],
files: &[
"image1.hdr",
"rgbr4x4.hdr",
],
format: ImageFormat::Hdr,
},
BenchDef {
dir: &["ico", "images"],
files: &[
"bmp-24bpp-mask.ico",
"bmp-32bpp-alpha.ico",
"png-32bpp-alpha.ico",
"smile.ico",
], | format: ImageFormat::Ico,
},
BenchDef {
dir: &["jpg", "progressive"],
files: &[
"3.jpg",
"cat.jpg",
"test.jpg",
],
format: ImageFormat::Jpeg,
},
// TODO: pnm
// TODO: png
BenchDef {
dir: &["tga", "testsuite"],
files: &[
"cbw8.tga",
"ctc24.tga",
"ubw8.tga",
"utc24.tga",
],
format: ImageFormat::Tga,
},
BenchDef {
dir: &["tiff", "testsuite"],
files: &[
"hpredict.tiff",
"hpredict_packbits.tiff",
"mandrill.tiff",
"rgb-3c-16b.tiff",
],
format: ImageFormat::Tiff,
},
BenchDef {
dir: &["webp", "images"],
files: &[
"simple-gray.webp",
"simple-rgb.webp",
"vp8x-gray.webp",
"vp8x-rgb.webp",
],
format: ImageFormat::WebP,
},
];
for bench in BENCH_DEFS {
bench_load(c, bench);
}
}
criterion_group!(benches, load_all);
criterion_main!(benches);
fn bench_load(c: &mut Criterion, def: &BenchDef) {
let group_name = format!("load-{:?}", def.format);
let mut group = c.benchmark_group(&group_name);
let paths = IMAGE_DIR.iter().chain(def.dir);
for file_name in def.files {
let path: path::PathBuf = paths.clone().chain(iter::once(file_name)).collect();
let buf = fs::read(path).unwrap();
group.bench_function(file_name.to_owned(), |b| b.iter(|| {
image::load_from_memory_with_format(&buf, def.format).unwrap();
}));
}
}
const IMAGE_DIR: [&'static str; 3] = [".", "tests", "images"]; | random_line_split |
|
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_root_from_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::JSAutoCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow,
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject,
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default(),
},
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject,
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface {
CallbackInterface {
object: CallbackObject {
callback: Heap::default(),
},
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str) -> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if !JS_GetProperty(cx, obj.handle(), c_name.as_ptr(), callable.handle_mut()) {
return Err(Error::JSFailed);
}
if !callable.ptr.is_object() || !IsCallable(callable.ptr.to_object()) |
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if !JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
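///
/// An illustrative sketch of the intended usage (the `callback` value and the
/// JS invocation itself are placeholders, not APIs defined in this module):
///
/// ```ignore
/// let s = CallSetup::new(&*callback, ExceptionHandling::Report);
/// let cx = s.get_context();
/// // root the arguments and perform the JS call with `cx` here;
/// // dropping `s` reports any pending exception (for `Report`), leaves the
/// // compartment, and ends the JS request.
/// ```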
pub struct CallSetup {
/// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly.
exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)]
pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_root_from_object(callback.callback());
let cx = global.r().get_cx();
unsafe {
JS_BeginRequest(cx);
}
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment: RootedObject::new_with_addr(cx,
exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn get_context(&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe {
JS_LeaveCompartment(self.cx, self.old_compartment);
}
let need_to_deal_with_exception = self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved {
JS_RestoreFrameChain(self.cx);
}
}
}
unsafe {
JS_EndRequest(self.cx);
}
}
}
| {
return Err(Error::Type(format!("The value of the {} property is not callable",
name)));
} | conditional_block |
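The CallbackContainer trait above is the hook that generated binding code implements for each concrete IDL callback type. The sketch below shows one plausible shape of such an implementation, assuming a hypothetical EventHandlerNonNull callback function type; the type name and the codegen details are assumptions, and only CallbackFunction, CallbackContainer, and their methods come from the module itself.
#[derive(JSTraceable, PartialEq)]
pub struct EventHandlerNonNull {
    parent: CallbackFunction,
}
impl CallbackContainer for EventHandlerNonNull {
    // Allocate the wrapper first, then initialize the callback once the
    // object is done moving, mirroring CallbackFunction::new / init above.
    fn new(callback: *mut JSObject) -> Rc<EventHandlerNonNull> {
        let mut ret = Rc::new(EventHandlerNonNull {
            parent: CallbackFunction::new(),
        });
        {
            let handler = Rc::get_mut(&mut ret).unwrap();
            handler.parent.init(callback);
        }
        ret
    }
    fn callback(&self) -> *mut JSObject {
        self.parent.callback()
    }
}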
callback.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Base classes to work with IDL callbacks.
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::global_root_from_object;
use dom::bindings::reflector::Reflectable;
use js::jsapi::GetGlobalForObjectCrossCompartment;
use js::jsapi::JSAutoCompartment;
use js::jsapi::{Heap, MutableHandleObject, RootedObject, RootedValue};
use js::jsapi::{IsCallable, JSContext, JSObject, JS_WrapObject};
use js::jsapi::{JSCompartment, JS_EnterCompartment, JS_LeaveCompartment};
use js::jsapi::{JS_BeginRequest, JS_EndRequest};
use js::jsapi::{JS_GetProperty, JS_IsExceptionPending, JS_ReportPendingException};
use js::jsapi::{JS_RestoreFrameChain, JS_SaveFrameChain};
use js::jsval::{JSVal, UndefinedValue};
use std::default::Default;
use std::ffi::CString;
use std::intrinsics::return_address;
use std::ptr;
use std::rc::Rc;
/// The exception handling used for a call.
#[derive(Copy, Clone, PartialEq)]
pub enum ExceptionHandling {
/// Report any exception and don't throw it to the caller code.
Report,
/// Throw any exception to the caller code.
Rethrow,
}
/// A common base class for representing IDL callback function types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackFunction {
object: CallbackObject,
}
impl CallbackFunction {
/// Create a new `CallbackFunction` for this object.
pub fn new() -> CallbackFunction {
CallbackFunction {
object: CallbackObject {
callback: Heap::default(),
},
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
}
/// A common base class for representing IDL callback interface types.
#[derive(JSTraceable, PartialEq)]
pub struct CallbackInterface {
object: CallbackObject,
}
/// A common base class for representing IDL callback function and
/// callback interface types.
#[allow(raw_pointer_derive)]
#[derive(JSTraceable)]
struct CallbackObject {
/// The underlying `JSObject`.
callback: Heap<*mut JSObject>,
}
impl PartialEq for CallbackObject {
fn eq(&self, other: &CallbackObject) -> bool {
self.callback.get() == other.callback.get()
}
}
/// A trait to be implemented by concrete IDL callback function and
/// callback interface types.
pub trait CallbackContainer {
/// Create a new CallbackContainer object for the given `JSObject`.
fn new(callback: *mut JSObject) -> Rc<Self>;
/// Returns the underlying `JSObject`.
fn callback(&self) -> *mut JSObject;
}
impl CallbackInterface {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackFunction {
/// Returns the underlying `JSObject`.
pub fn callback(&self) -> *mut JSObject {
self.object.callback.get()
}
}
impl CallbackInterface {
/// Create a new CallbackInterface object for the given `JSObject`.
pub fn new() -> CallbackInterface {
CallbackInterface {
object: CallbackObject {
callback: Heap::default(),
},
}
}
/// Initialize the callback function with a value.
/// Should be called once this object is done moving.
pub fn init(&mut self, callback: *mut JSObject) {
self.object.callback.set(callback);
}
/// Returns the property with the given `name`, if it is a callable object,
/// or an error otherwise.
pub fn get_callable_property(&self, cx: *mut JSContext, name: &str) -> Fallible<JSVal> {
let mut callable = RootedValue::new(cx, UndefinedValue());
let obj = RootedObject::new(cx, self.callback());
unsafe {
let c_name = CString::new(name).unwrap();
if !JS_GetProperty(cx, obj.handle(), c_name.as_ptr(), callable.handle_mut()) {
return Err(Error::JSFailed);
}
if !callable.ptr.is_object() || !IsCallable(callable.ptr.to_object()) {
return Err(Error::Type(format!("The value of the {} property is not callable",
name)));
}
}
Ok(callable.ptr)
}
}
/// Wraps the reflector for `p` into the compartment of `cx`.
pub fn wrap_call_this_object<T: Reflectable>(cx: *mut JSContext,
p: &T,
rval: MutableHandleObject) {
rval.set(p.reflector().get_jsobject().get());
assert!(!rval.get().is_null());
unsafe {
if !JS_WrapObject(cx, rval) {
rval.set(ptr::null_mut());
}
}
}
/// A class that performs whatever setup we need to safely make a call while
/// this class is on the stack. After `new` returns, the call is safe to make.
pub struct CallSetup {
/// The compartment for reporting exceptions.
/// As a RootedObject, this must be the first field in order to
/// determine the final address on the stack correctly.
exception_compartment: RootedObject,
/// The `JSContext` used for the call.
cx: *mut JSContext,
/// The compartment we were in before the call.
old_compartment: *mut JSCompartment,
/// The exception handling used for the call.
handling: ExceptionHandling,
}
impl CallSetup {
/// Performs the setup needed to make a call.
#[allow(unrooted_must_root)] | let cx = global.r().get_cx();
unsafe {
JS_BeginRequest(cx);
}
let exception_compartment = unsafe {
GetGlobalForObjectCrossCompartment(callback.callback())
};
CallSetup {
exception_compartment: RootedObject::new_with_addr(cx,
exception_compartment,
unsafe { return_address() }),
cx: cx,
old_compartment: unsafe { JS_EnterCompartment(cx, callback.callback()) },
handling: handling,
}
}
/// Returns the `JSContext` used for the call.
pub fn get_context(&self) -> *mut JSContext {
self.cx
}
}
impl Drop for CallSetup {
fn drop(&mut self) {
unsafe {
JS_LeaveCompartment(self.cx, self.old_compartment);
}
let need_to_deal_with_exception = self.handling == ExceptionHandling::Report &&
unsafe { JS_IsExceptionPending(self.cx) };
if need_to_deal_with_exception {
unsafe {
let old_global = RootedObject::new(self.cx, self.exception_compartment.ptr);
let saved = JS_SaveFrameChain(self.cx);
{
let _ac = JSAutoCompartment::new(self.cx, old_global.ptr);
JS_ReportPendingException(self.cx);
}
if saved {
JS_RestoreFrameChain(self.cx);
}
}
}
unsafe {
JS_EndRequest(self.cx);
}
}
} | pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
let global = global_root_from_object(callback.callback()); | random_line_split |
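For callback interfaces, the generated code typically looks up the IDL operation by name on the callback object before invoking it, which is what get_callable_property provides. A rough sketch, assuming a hypothetical wrapper whose `parent` field is the CallbackInterface above and an operation named handleEvent (the actual invocation is elided):
fn get_handle_event_operation(&self, cx: *mut JSContext) -> Fallible<JSVal> {
    // Fails with Error::JSFailed if the property lookup throws, and with
    // Error::Type if the property exists but is not callable.
    let callable = try!(self.parent.get_callable_property(cx, "handleEvent"));
    // A real caller would now root `callable` and invoke it inside a CallSetup.
    Ok(callable)
}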